# Import the necessary packages
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow.python.keras import Sequential
from tensorflow.keras import layers, optimizers
from tensorflow.keras.applications import DenseNet121
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.layers import *
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.initializers import glorot_uniform
from tensorflow.keras.utils import plot_model
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, LearningRateScheduler
from IPython.display import display
from tensorflow.keras import backend as K
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from keras import optimizers
# Load the facial-keypoints dataset: 30 float64 coordinate columns plus an
# 'Image' column holding each 96x96 grayscale image as a space-separated
# pixel string (see the .info() output further down).
facialpoints_df = pd.read_csv('KeyFacialPoints.csv')

# A bare `facialpoints_df` expression only echoes in a notebook REPL; call
# display() explicitly (imported from IPython.display above) so the preview
# also appears when this file runs as a plain script.
display(facialpoints_df)
| left_eye_center_x | left_eye_center_y | right_eye_center_x | right_eye_center_y | left_eye_inner_corner_x | left_eye_inner_corner_y | left_eye_outer_corner_x | left_eye_outer_corner_y | right_eye_inner_corner_x | right_eye_inner_corner_y | ... | nose_tip_y | mouth_left_corner_x | mouth_left_corner_y | mouth_right_corner_x | mouth_right_corner_y | mouth_center_top_lip_x | mouth_center_top_lip_y | mouth_center_bottom_lip_x | mouth_center_bottom_lip_y | Image | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 66.033564 | 39.002274 | 30.227008 | 36.421678 | 59.582075 | 39.647423 | 73.130346 | 39.969997 | 36.356571 | 37.389402 | ... | 57.066803 | 61.195308 | 79.970165 | 28.614496 | 77.388992 | 43.312602 | 72.935459 | 43.130707 | 84.485774 | 238 236 237 238 240 240 239 241 241 243 240 23... |
| 1 | 64.332936 | 34.970077 | 29.949277 | 33.448715 | 58.856170 | 35.274349 | 70.722723 | 36.187166 | 36.034723 | 34.361532 | ... | 55.660936 | 56.421447 | 76.352000 | 35.122383 | 76.047660 | 46.684596 | 70.266553 | 45.467915 | 85.480170 | 219 215 204 196 204 211 212 200 180 168 178 19... |
| 2 | 65.057053 | 34.909642 | 30.903789 | 34.909642 | 59.412000 | 36.320968 | 70.984421 | 36.320968 | 37.678105 | 36.320968 | ... | 53.538947 | 60.822947 | 73.014316 | 33.726316 | 72.732000 | 47.274947 | 70.191789 | 47.274947 | 78.659368 | 144 142 159 180 188 188 184 180 167 132 84 59 ... |
| 3 | 65.225739 | 37.261774 | 32.023096 | 37.261774 | 60.003339 | 39.127179 | 72.314713 | 38.380967 | 37.618643 | 38.754115 | ... | 54.166539 | 65.598887 | 72.703722 | 37.245496 | 74.195478 | 50.303165 | 70.091687 | 51.561183 | 78.268383 | 193 192 193 194 194 194 193 192 168 111 50 12 ... |
| 4 | 66.725301 | 39.621261 | 32.244810 | 38.042032 | 58.565890 | 39.621261 | 72.515926 | 39.884466 | 36.982380 | 39.094852 | ... | 64.889521 | 60.671411 | 77.523239 | 31.191755 | 76.997301 | 44.962748 | 73.707387 | 44.227141 | 86.871166 | 147 148 160 196 215 214 216 217 219 220 206 18... |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 2135 | 67.180378 | 35.816373 | 33.239956 | 34.921932 | 59.347973 | 37.000904 | 72.667896 | 37.097600 | 39.404349 | 36.589944 | ... | 60.065396 | 64.397610 | 73.248393 | 35.446431 | 74.014748 | 48.708626 | 76.760852 | 48.559612 | 77.335618 | 191 191 191 190 189 185 184 120 54 34 19 24 31... |
| 2136 | 65.724490 | 36.301020 | 25.377551 | 37.311224 | 58.530612 | 37.739796 | 74.448980 | 37.525510 | 33.551020 | 38.107143 | ... | 62.472789 | 66.928121 | 79.229046 | 28.015377 | 81.151722 | 48.771976 | 81.816774 | 49.287271 | 82.871156 | 19 19 19 18 13 7 3 4 3 1 3 9 11 12 12 13 10 9 ... |
| 2137 | 68.430866 | 38.651975 | 28.895857 | 37.617027 | 61.659350 | 40.100902 | 75.586792 | 40.219182 | 37.665118 | 39.087245 | ... | 63.289576 | 68.864397 | 77.495823 | 22.013981 | 77.368995 | 49.180628 | 79.043130 | 47.176739 | 89.544522 | 31 40 47 31 54 58 63 100 86 80 82 75 79 86 90 ... |
| 2138 | 64.152180 | 30.691592 | 27.000898 | 40.868082 | 56.505624 | 34.126963 | 73.436776 | 28.556335 | 34.746122 | 40.506939 | ... | 59.735799 | 73.730743 | 70.273886 | 38.777143 | 80.684286 | 58.042857 | 79.301429 | 58.611086 | 80.355543 | 7 1 5 1 3 20 12 0 5 8 6 18 13 9 8 12 12 11 4 8... |
| 2139 | 66.683755 | 34.483429 | 30.784490 | 38.578939 | 59.255347 | 36.065143 | 73.942694 | 34.624653 | 37.478531 | 39.398041 | ... | 59.269388 | 72.600433 | 71.862041 | 34.232759 | 77.339429 | 51.599453 | 75.963592 | 52.923371 | 82.661062 | 68 19 19 23 19 26 23 16 8 15 9 9 17 16 15 27 2... |
2140 rows × 31 columns
# Print a concise schema summary: expect 2140 rows, 30 non-null float64
# keypoint columns and one object ('Image') column, ~518 KB in memory.
# info() writes to stdout itself, so no display()/print wrapper is needed.
facialpoints_df.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 2140 entries, 0 to 2139 Data columns (total 31 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 left_eye_center_x 2140 non-null float64 1 left_eye_center_y 2140 non-null float64 2 right_eye_center_x 2140 non-null float64 3 right_eye_center_y 2140 non-null float64 4 left_eye_inner_corner_x 2140 non-null float64 5 left_eye_inner_corner_y 2140 non-null float64 6 left_eye_outer_corner_x 2140 non-null float64 7 left_eye_outer_corner_y 2140 non-null float64 8 right_eye_inner_corner_x 2140 non-null float64 9 right_eye_inner_corner_y 2140 non-null float64 10 right_eye_outer_corner_x 2140 non-null float64 11 right_eye_outer_corner_y 2140 non-null float64 12 left_eyebrow_inner_end_x 2140 non-null float64 13 left_eyebrow_inner_end_y 2140 non-null float64 14 left_eyebrow_outer_end_x 2140 non-null float64 15 left_eyebrow_outer_end_y 2140 non-null float64 16 right_eyebrow_inner_end_x 2140 non-null float64 17 right_eyebrow_inner_end_y 2140 non-null float64 18 right_eyebrow_outer_end_x 2140 non-null float64 19 right_eyebrow_outer_end_y 2140 non-null float64 20 nose_tip_x 2140 non-null float64 21 nose_tip_y 2140 non-null float64 22 mouth_left_corner_x 2140 non-null float64 23 mouth_left_corner_y 2140 non-null float64 24 mouth_right_corner_x 2140 non-null float64 25 mouth_right_corner_y 2140 non-null float64 26 mouth_center_top_lip_x 2140 non-null float64 27 mouth_center_top_lip_y 2140 non-null float64 28 mouth_center_bottom_lip_x 2140 non-null float64 29 mouth_center_bottom_lip_y 2140 non-null float64 30 Image 2140 non-null object dtypes: float64(30), object(1) memory usage: 518.4+ KB
# Let's take a look at a sample image: the 'Image' cell for row 1 is a single
# space-separated string of grayscale pixel values (it must be parsed with
# np.fromstring/np.array(...).reshape later before it can be plotted).
# Use .loc for a single-step label lookup instead of chained indexing
# (df['Image'][1]), which pandas discourages, and display() explicitly so
# the value is shown outside a notebook REPL as well.
display(facialpoints_df.loc[1, 'Image'])
'219 215 204 196 204 211 212 200 180 168 178 196 194 196 203 209 199 192 197 201 207 215 199 190 182 180 183 190 190 176 175 175 170 158 162 170 150 133 133 128 121 115 115 114 112 114 113 112 120 127 123 118 112 109 112 110 107 106 104 104 100 98 105 122 135 143 152 135 114 104 81 74 87 107 98 82 84 79 70 69 62 62 79 94 97 100 98 93 100 100 97 96 93 92 88 84 222 219 220 211 207 199 185 178 171 175 196 199 199 200 201 205 202 200 204 206 211 219 204 192 188 185 185 193 189 180 177 178 173 158 159 162 145 133 133 129 122 118 116 115 114 115 115 118 127 132 127 120 116 119 118 114 114 113 110 108 105 104 110 130 140 149 152 128 106 106 91 78 90 107 101 84 90 93 79 75 73 59 58 74 94 102 100 99 97 90 94 96 95 92 88 86 231 224 212 197 191 186 176 170 172 190 201 199 200 203 204 210 215 211 210 213 213 217 210 192 188 188 189 194 189 183 181 183 175 158 158 158 142 132 133 131 125 120 121 120 116 115 116 122 135 137 129 122 121 125 121 119 120 118 117 112 112 109 116 136 149 156 149 126 109 108 98 91 97 110 109 89 93 99 90 82 79 68 60 53 60 82 97 104 100 93 92 91 85 77 80 84 216 207 199 196 186 178 176 169 187 195 196 200 198 205 201 209 223 220 213 215 216 218 207 192 190 196 196 193 190 186 189 186 168 155 159 154 141 137 132 129 125 118 120 124 126 122 120 128 132 133 125 119 122 123 125 123 121 120 118 116 114 117 126 142 156 156 142 120 114 116 100 93 104 119 112 93 95 102 98 90 87 77 66 62 48 49 71 91 102 106 96 79 68 65 67 72 216 208 200 190 180 178 188 191 191 192 197 187 191 200 193 205 224 225 218 219 220 219 208 198 203 204 197 196 193 191 196 186 161 151 157 150 140 137 132 129 125 125 121 121 124 125 124 124 129 129 121 121 123 125 124 122 122 121 121 120 117 118 125 146 155 146 131 113 113 114 104 91 104 120 112 95 93 106 99 90 96 83 75 76 67 46 46 72 90 106 107 81 61 57 53 58 200 187 181 179 179 188 190 184 180 188 189 182 188 192 189 205 225 225 224 223 220 215 208 205 208 203 197 199 200 197 194 174 154 146 148 146 141 139 135 131 127 124 122 121 121 122 
119 119 125 123 119 120 121 120 122 124 122 122 120 119 121 118 126 146 145 130 117 108 115 115 105 95 103 116 112 98 96 106 105 93 96 92 76 77 80 62 45 55 73 83 100 93 60 46 39 49 199 195 194 190 187 183 187 191 191 185 178 180 190 193 195 209 223 225 225 226 219 211 207 210 209 201 198 202 204 198 187 160 148 143 142 145 144 143 136 133 130 125 123 125 125 122 120 124 121 116 120 117 113 115 119 121 120 117 117 118 118 121 127 138 128 119 111 106 110 114 106 97 106 115 109 98 98 106 108 99 100 93 74 72 78 75 52 50 69 71 85 96 72 50 35 40 212 202 196 187 183 186 189 192 190 182 173 180 194 194 200 212 222 223 226 226 216 206 206 214 210 198 200 205 206 196 170 151 144 141 143 144 144 137 135 133 133 132 129 129 128 125 125 122 119 120 119 114 110 118 119 121 119 114 119 117 120 122 126 127 112 107 108 106 110 113 107 98 102 112 105 97 97 108 109 101 102 95 77 69 71 65 57 53 60 65 70 79 72 52 44 42 188 181 174 179 189 195 185 178 178 178 177 188 195 188 196 212 221 225 229 224 214 208 206 210 207 202 205 205 197 175 154 146 140 140 137 136 132 129 128 128 131 130 130 127 123 124 123 123 120 121 115 111 115 116 117 118 118 120 117 115 117 119 118 111 102 95 101 107 109 112 110 106 100 105 103 91 91 103 109 102 101 98 79 72 75 60 53 47 55 61 63 70 69 49 36 46 176 175 179 182 180 174 164 164 172 173 182 191 185 179 195 217 224 224 224 224 216 211 212 206 203 207 206 194 173 157 148 144 141 136 129 130 131 132 130 131 130 127 124 124 124 122 119 116 118 119 115 115 117 115 113 116 118 116 118 116 112 118 116 104 99 101 108 109 109 111 116 109 100 105 102 86 84 97 104 97 104 102 82 72 74 69 58 50 53 63 65 64 66 62 52 59 152 155 158 154 148 149 155 161 164 172 175 178 177 179 198 220 229 228 229 227 218 216 211 203 207 204 189 171 159 154 147 143 140 136 130 130 129 129 128 129 127 126 123 122 123 123 120 120 122 119 117 119 117 117 118 121 120 116 118 120 118 116 115 109 106 109 114 112 110 112 115 110 103 102 101 91 85 95 99 95 105 103 83 69 69 74 67 61 61 66 67 63 66 
73 74 78 146 146 143 143 148 147 150 152 157 163 160 171 176 179 201 226 237 237 236 229 221 219 214 209 198 180 166 164 159 152 153 149 142 139 132 130 128 128 128 127 127 126 124 122 121 122 123 119 120 118 118 119 119 118 119 120 120 121 119 123 120 118 116 112 115 115 116 114 114 115 112 110 107 101 99 93 92 91 94 91 104 108 83 68 67 76 79 71 67 67 69 75 75 71 70 82 161 145 136 137 138 130 127 131 141 152 163 176 178 183 204 229 240 241 237 233 223 218 213 205 189 178 178 176 168 161 155 152 145 139 139 136 132 128 130 129 128 129 126 122 119 121 121 118 117 118 119 121 121 120 120 123 125 119 121 123 122 120 119 120 118 119 120 119 117 111 110 109 106 102 101 98 92 90 85 84 102 108 93 80 79 83 83 78 76 81 80 77 71 69 68 73 149 134 132 135 130 125 121 121 133 148 165 178 181 191 215 236 243 242 238 234 223 214 203 194 190 185 187 187 178 169 163 155 149 144 143 138 133 130 130 130 127 130 128 122 119 122 121 120 118 119 122 124 119 120 121 122 122 123 123 121 120 119 121 120 120 119 119 120 118 114 112 111 109 106 104 94 94 93 86 86 96 103 102 96 95 94 94 97 93 78 60 54 51 48 51 54 137 133 134 137 132 120 113 118 129 145 163 178 182 195 221 240 244 243 239 233 226 216 204 197 195 193 193 194 192 184 177 166 158 152 147 143 136 133 131 131 130 128 129 124 123 124 124 123 123 124 123 123 122 121 122 120 118 119 121 120 118 121 119 118 121 121 120 119 116 120 117 112 111 110 106 99 99 96 90 86 89 91 93 97 97 98 99 88 68 54 48 39 29 32 40 46 140 138 144 149 135 110 104 117 123 139 163 177 179 196 227 243 245 245 242 237 230 220 212 205 204 201 202 203 201 198 187 175 166 158 154 147 141 136 135 134 133 133 129 128 126 126 130 129 127 126 122 120 121 125 119 117 121 121 122 122 123 122 121 121 120 122 120 119 119 119 121 117 112 113 110 108 102 97 88 87 88 88 91 92 97 92 81 65 57 55 38 23 31 39 49 63 135 138 135 137 136 121 111 104 118 137 157 172 179 199 229 244 246 244 243 240 232 224 219 215 208 207 207 204 201 197 191 179 168 161 156 152 146 141 138 134 135 136 
131 130 131 132 130 129 129 128 124 124 125 125 123 122 122 124 121 123 124 122 124 125 122 121 121 120 119 119 120 120 115 113 112 109 103 100 94 88 88 83 83 86 84 71 59 49 43 33 25 31 45 61 76 91 145 145 133 126 116 119 122 108 114 131 156 171 178 203 231 244 245 246 246 241 236 227 223 217 209 205 203 202 199 191 186 178 169 162 158 153 146 142 140 139 137 137 135 133 133 132 129 129 127 129 127 122 125 127 126 124 126 125 127 129 126 127 126 124 124 121 120 122 120 120 121 121 119 117 116 113 107 99 92 88 86 80 77 71 57 46 41 34 25 19 24 36 50 70 94 103 150 142 133 123 119 122 119 113 110 125 146 171 185 208 235 245 246 247 245 240 239 233 223 216 210 204 195 196 189 176 172 168 162 152 150 151 146 140 139 138 140 141 136 136 132 130 130 129 130 130 129 128 127 130 128 128 126 127 130 130 130 126 122 122 122 121 118 119 118 118 119 118 119 117 117 114 108 103 94 87 83 78 69 55 42 31 28 31 25 18 25 35 49 67 89 99 138 138 131 125 122 116 107 105 110 119 144 168 187 215 236 245 246 245 244 242 237 235 223 213 206 196 184 177 168 160 156 153 148 140 143 145 141 139 137 138 140 136 135 135 132 131 128 128 131 133 131 131 129 126 133 136 131 126 125 127 125 125 121 119 119 118 117 114 113 112 113 116 114 115 119 116 112 104 99 89 81 73 63 50 34 25 23 26 32 29 32 41 50 65 78 89 138 131 121 120 124 120 113 118 117 115 139 165 184 214 239 245 245 244 244 243 239 234 223 206 191 173 160 154 145 139 133 131 129 123 127 132 131 133 134 137 136 135 133 134 135 131 127 126 129 130 130 131 129 129 131 135 135 126 124 127 123 119 120 119 115 117 115 109 108 110 111 110 113 114 115 116 111 106 101 89 76 65 60 48 28 32 35 34 36 35 36 41 51 64 73 86 132 123 114 114 110 106 108 115 106 104 133 160 182 216 242 246 244 244 243 241 237 226 203 169 144 124 115 112 103 96 93 92 90 88 94 98 106 113 122 124 129 132 130 131 133 133 130 128 127 125 129 129 131 132 131 134 134 128 121 123 126 119 116 116 110 106 105 106 103 107 110 107 108 112 113 114 110 105 99 88 70 62 53 39 30 35 48 53 
52 47 39 43 52 60 70 82 118 116 115 113 105 97 93 86 85 98 126 153 181 222 245 248 246 243 240 235 218 182 138 106 87 77 72 67 59 52 48 46 47 44 47 54 60 74 91 105 115 116 123 129 134 134 127 124 124 127 129 130 134 129 128 127 129 127 115 111 109 108 105 99 97 92 87 90 91 94 96 106 110 110 111 109 110 104 100 94 74 57 45 31 26 30 38 53 65 66 63 59 54 55 62 78 116 111 121 115 105 96 85 77 81 94 122 148 186 230 247 246 243 241 225 192 155 122 102 94 87 73 61 56 51 42 35 35 34 28 30 35 38 47 67 83 92 101 114 124 131 131 129 124 125 129 128 130 131 124 121 124 120 111 99 85 76 69 68 67 62 59 57 59 65 65 69 77 94 106 108 110 106 104 102 95 77 54 37 24 16 16 14 26 40 53 68 83 81 71 71 82 109 115 119 112 100 82 72 73 80 91 118 148 192 238 248 246 243 223 183 157 155 159 152 145 128 106 86 78 73 63 54 45 41 41 44 48 50 49 61 73 84 90 101 115 123 127 126 125 127 128 130 129 125 117 116 114 95 74 58 43 34 27 29 32 29 26 26 36 41 42 42 43 60 86 107 109 104 104 103 98 78 56 35 18 9 10 16 25 34 41 53 78 96 90 84 90 109 121 119 113 92 75 73 76 75 87 113 145 206 245 248 247 231 191 178 200 209 200 189 172 151 136 122 112 106 102 94 83 70 65 65 70 73 68 69 78 85 89 93 107 123 127 124 128 128 127 127 126 123 108 97 89 71 48 32 24 21 28 33 36 33 29 31 31 32 34 37 41 44 58 84 100 105 102 102 97 82 58 32 16 13 14 22 29 34 38 45 56 74 91 93 95 119 125 126 110 90 79 73 72 75 88 103 150 220 248 247 246 215 195 218 232 222 202 187 174 162 147 136 125 116 112 109 109 105 99 96 97 98 96 93 93 93 93 97 108 125 128 129 127 127 124 123 122 117 99 82 73 59 46 39 38 40 46 52 54 48 46 49 53 55 53 55 55 54 52 52 72 94 101 101 99 81 57 34 15 16 20 26 34 41 45 46 51 58 69 89 96 131 130 119 107 92 78 68 65 72 80 94 161 234 248 248 243 220 219 235 234 219 209 193 172 162 153 139 129 124 118 112 108 109 105 102 105 106 111 114 112 112 113 115 124 136 134 132 134 130 129 127 121 113 94 79 71 66 63 61 66 71 75 79 86 88 88 91 93 92 90 87 81 76 67 55 48 66 88 105 102 82 59 35 16 10 11 16 24 34 50 60 59 54 
57 69 88 156 140 121 107 92 77 67 63 62 67 98 177 240 250 247 244 234 232 233 227 215 206 192 169 149 137 126 116 112 107 100 99 98 96 100 106 108 111 114 113 115 125 142 151 153 148 141 139 136 126 123 120 116 100 85 81 80 81 87 90 90 94 97 104 108 107 108 103 101 107 106 96 90 85 80 67 50 65 96 107 90 63 40 16 2 1 6 16 23 34 52 66 65 57 61 78 167 155 139 119 95 79 64 57 47 64 112 185 239 248 248 244 240 238 233 227 214 199 179 146 117 100 88 72 62 59 56 55 54 56 66 83 102 111 111 109 108 118 148 172 172 160 148 140 135 123 119 118 109 97 87 84 85 89 93 91 91 93 94 96 98 100 99 99 101 101 101 99 100 100 97 92 76 60 84 104 94 68 46 20 3 2 4 16 26 34 40 54 62 62 63 69 167 161 147 135 116 90 61 37 38 68 125 193 242 250 248 246 241 238 232 220 207 187 143 102 92 86 66 40 29 23 19 22 30 33 31 38 65 96 106 108 108 104 132 177 192 173 150 145 133 125 121 118 104 87 80 82 85 89 89 80 69 62 64 67 71 72 72 74 80 88 90 100 103 103 100 101 99 73 80 105 99 68 41 19 3 1 2 9 20 30 39 46 55 66 71 71 176 163 147 135 125 111 69 25 38 86 137 208 246 249 249 246 242 238 230 218 196 149 98 80 86 120 131 80 23 4 4 8 20 36 43 30 34 64 92 105 107 97 106 163 205 192 163 143 129 123 119 109 93 79 76 82 84 81 67 46 30 32 36 37 41 40 43 47 52 55 68 83 93 95 97 107 104 90 90 104 98 70 37 12 1 1 2 7 12 20 28 37 46 55 71 76 178 171 159 137 119 111 75 21 46 106 157 220 247 248 247 246 244 239 228 215 172 110 92 90 136 228 238 135 42 10 1 17 39 66 85 59 35 40 73 93 104 101 96 140 201 198 167 143 121 114 113 99 85 76 75 77 70 56 35 25 27 32 19 9 9 7 10 21 37 51 54 56 79 95 100 103 105 95 99 108 102 72 37 10 1 3 11 19 23 23 29 32 34 43 63 75 180 170 161 148 124 93 52 38 70 112 174 228 247 249 247 245 245 242 229 200 144 113 143 139 179 250 248 142 50 26 18 27 36 82 120 89 54 45 59 78 93 97 96 133 202 206 166 138 117 112 108 94 82 78 75 69 54 43 37 36 52 44 17 3 0 1 9 27 51 52 45 48 62 84 100 103 105 99 103 109 105 73 35 6 1 2 11 22 31 35 38 40 43 45 54 74 192 176 164 156 136 94 49 54 69 120 189 233 
248 249 246 246 246 243 224 182 147 163 206 193 158 188 215 159 52 38 58 42 41 92 113 87 65 56 65 79 88 98 102 137 206 217 175 140 124 117 107 89 77 83 78 69 57 47 44 58 86 61 21 11 2 5 21 48 92 96 61 36 45 69 93 105 104 101 104 109 105 75 38 7 1 1 2 8 15 24 35 48 55 52 54 77 197 192 178 169 149 114 78 51 66 134 195 235 249 248 247 246 247 244 229 203 193 205 228 237 181 129 135 143 88 47 47 46 64 83 72 54 49 49 60 87 101 106 105 148 211 219 180 147 126 119 107 89 79 79 77 68 56 47 42 59 90 72 25 19 25 16 20 69 119 118 84 54 49 64 87 99 104 106 106 111 106 77 42 9 0 1 1 1 2 8 22 35 50 58 63 92 198 206 191 179 163 127 97 80 91 143 199 238 248 247 245 244 242 237 230 222 216 207 213 228 211 168 133 112 95 82 70 69 70 65 59 49 45 55 67 93 114 114 119 170 218 212 177 144 126 119 106 88 82 87 91 74 53 44 37 41 60 64 38 26 32 23 36 95 124 96 75 72 65 70 85 98 106 106 108 110 111 82 45 12 1 1 3 8 8 8 13 24 37 54 82 109 205 212 208 184 155 113 96 112 128 164 207 239 249 248 245 245 240 231 223 223 215 201 196 207 209 190 160 129 103 99 89 83 79 74 75 70 70 74 83 93 107 122 148 194 219 208 172 142 129 120 110 93 83 87 90 82 72 60 49 43 49 55 55 47 40 42 60 84 91 84 85 84 80 78 87 99 105 110 109 112 110 86 51 16 1 1 2 12 21 18 15 20 41 67 97 105 206 221 216 181 135 96 110 134 153 178 216 243 249 247 246 243 239 228 221 217 216 202 193 193 193 185 168 144 128 118 108 100 95 90 88 86 84 79 80 87 103 133 177 211 224 206 165 141 127 121 114 99 86 83 84 78 70 67 61 58 58 64 71 73 73 69 71 77 85 94 97 101 97 90 91 99 110 111 112 117 110 84 54 23 3 1 5 10 14 22 30 45 69 84 92 94 208 226 218 159 112 112 130 149 168 190 222 245 247 246 245 241 233 223 215 210 204 194 185 183 180 170 158 143 130 119 113 108 101 96 97 92 87 83 85 99 124 166 203 226 232 205 167 139 127 120 115 103 91 87 82 74 70 68 69 68 69 72 81 88 91 92 91 95 100 100 101 101 98 101 100 103 112 114 114 117 112 87 56 24 4 3 8 10 14 29 55 73 69 63 85 79 220 236 199 127 91 117 144 157 178 201 228 245 247 247 246 241 231 
220 215 211 196 181 171 167 163 155 147 133 126 117 113 111 107 106 103 99 97 98 108 133 170 204 224 237 239 208 167 139 128 122 116 106 93 89 86 83 79 78 76 77 80 79 82 83 89 93 94 97 97 97 98 96 102 107 110 110 110 116 116 115 115 93 59 24 3 7 14 21 31 45 55 55 39 57 90 60 228 219 152 109 81 113 159 170 195 213 235 246 247 248 245 241 233 221 211 202 190 173 160 156 152 147 139 134 128 124 123 116 115 112 105 105 111 129 155 180 208 226 238 246 247 214 169 142 125 120 118 109 97 94 92 93 90 85 82 86 88 87 87 86 91 90 92 94 95 97 100 106 108 111 115 114 115 114 115 115 114 94 66 31 3 4 16 21 21 18 11 15 41 84 81 43 209 173 125 117 77 114 162 172 185 215 236 246 248 247 247 245 236 226 214 202 187 167 154 148 147 144 139 129 124 124 121 120 117 115 117 122 141 169 196 215 231 243 246 250 249 220 174 140 125 118 116 108 99 100 99 99 97 95 90 90 97 95 94 97 99 100 99 100 104 104 107 109 111 114 116 117 114 116 115 115 115 98 73 44 10 1 3 5 2 0 0 16 65 87 56 34 176 161 135 131 109 140 177 173 166 199 235 245 247 247 248 247 242 235 225 210 196 180 164 152 145 140 138 130 122 118 118 116 117 123 134 150 175 199 219 231 244 247 248 248 245 220 176 142 125 121 120 110 103 105 104 102 104 107 100 97 100 101 98 99 107 108 105 108 110 113 116 116 119 117 119 120 119 120 117 116 114 100 75 47 18 3 2 3 4 1 10 45 78 66 39 32 176 162 134 141 148 169 185 173 161 188 234 244 245 248 249 250 247 243 235 218 205 192 175 156 142 135 130 129 123 116 118 118 128 141 153 171 193 217 227 238 246 247 246 244 241 220 181 149 127 122 120 111 106 104 108 105 103 108 111 108 103 103 101 103 112 112 118 117 115 116 118 119 119 121 120 118 121 118 119 114 110 99 74 51 32 16 12 8 5 13 38 62 62 45 34 33 197 173 156 162 173 172 168 167 166 199 231 242 245 247 249 250 249 248 243 232 217 200 180 166 152 141 135 129 123 123 125 128 136 147 159 178 203 222 232 240 246 246 245 239 231 210 180 151 134 121 117 114 104 102 109 113 109 110 115 113 111 108 105 107 113 112 114 121 122 116 119 120 120 122 
125 124 123 122 120 117 108 97 72 49 32 24 21 10 11 33 48 53 38 26 33 33 235 217 199 187 176 165 159 158 184 216 235 241 244 245 247 248 249 248 246 240 228 206 183 171 156 147 140 133 127 128 133 135 138 147 160 183 205 223 237 240 244 245 241 231 214 194 172 150 131 122 120 115 108 102 108 114 116 115 115 118 116 115 112 113 115 108 109 114 114 114 120 123 125 126 124 125 124 121 121 115 110 98 79 53 22 19 19 19 29 36 40 29 14 20 32 39 239 213 187 168 161 159 145 126 177 229 234 241 242 243 246 248 247 246 244 239 226 210 190 169 161 154 145 138 134 134 136 135 136 145 164 188 206 223 236 240 244 243 234 218 201 182 163 148 130 123 117 115 110 102 106 115 121 119 118 120 120 120 120 122 120 116 113 115 116 117 122 124 123 128 125 125 124 122 120 114 107 92 77 63 30 17 21 23 25 29 22 3 10 23 29 42 218 190 154 130 148 155 121 89 161 230 236 240 243 243 245 245 246 244 241 234 219 211 195 174 170 163 153 143 140 141 139 138 135 143 163 184 208 223 230 238 245 243 229 209 193 172 150 136 131 123 118 116 111 106 103 114 121 122 124 120 119 119 121 120 120 122 118 120 123 126 126 127 129 128 127 127 126 123 119 111 101 86 75 65 40 17 9 17 27 22 5 3 21 26 33 53 208 177 115 89 141 152 109 68 141 229 239 243 241 243 243 246 244 240 236 229 222 211 198 187 180 170 158 150 146 143 147 144 143 150 163 177 196 212 219 237 245 240 228 208 189 166 145 133 124 123 121 118 114 106 107 113 119 123 127 125 121 123 124 122 122 122 124 127 124 127 128 130 130 129 129 126 126 121 118 109 97 77 70 67 45 15 0 7 18 14 8 13 25 27 36 66 215 175 107 89 142 148 100 60 83 202 241 243 242 241 242 243 240 238 234 229 227 217 204 194 186 176 165 157 151 147 146 149 152 161 161 167 181 191 211 237 243 236 229 218 195 166 148 133 126 123 120 118 110 106 103 108 118 120 125 126 128 126 125 125 127 125 125 122 121 122 128 127 128 130 127 124 122 121 113 107 92 62 65 71 46 8 1 0 1 4 6 9 22 29 47 69 235 183 107 84 139 139 97 74 64 152 232 242 242 240 240 241 241 239 232 229 226 217 205 197 190 182 170 
160 155 151 151 149 152 157 154 152 158 172 208 235 238 237 243 235 202 172 149 135 126 119 120 119 115 110 104 103 113 121 123 125 129 128 130 131 128 126 127 125 125 122 125 130 128 129 132 126 119 116 111 109 85 52 64 72 38 3 1 1 1 1 2 10 25 40 55 73 241 187 111 84 123 149 127 124 110 120 209 240 240 241 241 240 239 236 230 227 227 218 209 202 192 186 173 163 157 156 156 150 148 147 145 143 141 156 200 231 233 241 252 247 212 179 152 138 126 126 125 118 116 111 105 95 106 116 122 130 129 129 129 130 128 123 129 132 132 127 126 130 128 127 124 120 117 111 108 102 79 48 59 66 25 1 1 1 2 3 11 23 33 45 60 79 232 192 117 83 93 133 150 175 154 139 198 237 240 241 241 239 239 237 231 228 224 218 214 207 196 185 173 161 160 165 156 152 147 141 139 134 124 132 196 230 236 244 252 250 222 183 157 139 131 130 127 118 113 112 107 96 98 113 121 126 130 128 128 130 127 129 127 131 130 128 130 131 131 128 124 122 118 111 107 96 57 30 51 52 16 0 1 0 1 10 28 41 41 53 68 78 213 191 132 89 73 99 148 194 208 205 208 226 240 240 238 240 240 235 228 225 225 218 210 208 200 189 178 173 174 164 155 154 146 139 133 131 110 119 199 236 241 246 251 247 228 196 163 142 136 133 131 123 114 115 113 98 93 109 121 125 128 127 126 126 128 130 133 133 130 132 133 133 131 129 126 122 117 112 105 81 40 27 39 40 11 0 1 4 12 26 41 48 55 68 75 84 194 188 139 93 61 86 146 187 218 210 183 200 235 241 238 238 237 234 231 225 223 219 211 205 202 193 183 183 182 169 158 151 148 139 131 131 98 112 207 243 240 243 241 238 230 204 171 148 139 136 133 126 114 112 108 99 98 109 122 128 129 131 123 123 125 130 136 135 134 132 133 130 127 129 124 116 113 109 102 71 46 45 50 40 12 4 10 17 27 40 51 57 66 79 86 97 164 178 147 100 66 70 139 173 193 194 164 182 229 241 240 238 236 234 227 227 225 220 215 204 201 194 187 181 173 164 159 159 148 139 133 126 104 121 215 240 230 226 219 217 221 205 175 152 142 134 131 120 110 108 106 103 100 113 124 128 130 129 126 124 126 131 131 135 134 129 131 128 127 123 122 113 107 
110 100 76 68 65 59 29 10 13 19 24 32 43 45 56 76 89 97 109 132 156 149 107 73 54 123 165 178 185 169 178 222 239 239 239 237 234 228 228 227 220 210 203 199 197 191 184 175 164 162 157 146 138 131 125 110 126 203 229 211 200 203 210 213 199 170 151 140 133 126 109 103 101 105 102 101 114 127 127 128 129 129 129 131 132 131 133 131 130 128 127 124 121 114 107 108 106 103 99 93 73 39 8 8 9 10 9 16 28 44 67 84 91 96 107 118 128 140 120 85 61 78 134 171 183 172 176 214 237 238 240 237 234 231 231 229 224 218 206 202 203 198 194 178 170 164 153 144 135 135 132 125 123 162 203 183 139 125 179 209 190 157 137 129 128 117 100 97 105 106 99 103 116 125 128 130 131 135 135 130 134 135 131 130 126 123 123 122 119 111 106 108 108 101 101 102 76 32 3 3 1 0 9 24 44 61 74 80 84 97 107 137 110 110 118 94 69 53 76 125 163 171 171 197 232 238 238 240 236 232 231 230 229 222 215 210 204 200 193 179 168 163 156 143 135 135 140 149 140 135 151 157 132 112 145 181 174 143 125 124 116 100 84 84 92 96 101 104 114 123 127 130 132 133 133 132 130 129 131 132 125 123 122 119 115 112 108 105 104 97 96 100 79 22 2 2 3 11 29 47 64 75 76 80 89 103 111 177 141 103 89 83 71 58 36 45 77 110 116 155 226 242 237 238 236 234 232 231 230 228 222 212 209 206 197 184 169 156 146 141 135 137 157 171 173 151 135 153 178 185 170 160 159 145 131 119 105 93 86 80 78 89 101 111 118 122 126 130 135 134 135 134 132 130 127 128 123 122 119 117 113 108 105 108 101 94 93 93 60 10 0 3 10 25 42 59 69 82 85 92 98 103 114 194 165 138 107 76 53 34 22 20 29 51 40 81 208 243 238 236 236 235 235 231 229 230 224 215 209 210 201 181 169 154 143 140 142 156 176 188 193 184 168 162 181 200 198 168 135 126 118 105 97 94 91 80 86 96 107 114 117 119 124 130 131 130 131 132 135 130 126 125 121 121 118 114 110 105 106 107 103 95 84 62 20 0 4 11 22 35 54 75 88 93 96 96 98 108 124 202 177 154 140 121 89 59 44 42 55 67 41 56 195 244 236 237 236 234 231 232 231 227 224 217 213 208 196 178 166 156 145 145 162 177 190 196 200 202 194 
184 182 199 204 178 129 106 108 106 103 100 97 97 100 104 112 114 116 121 126 130 131 131 132 135 134 127 125 126 121 117 117 116 111 107 106 104 88 59 33 11 6 10 22 36 49 64 80 92 94 89 91 95 100 118 131 204 184 162 145 135 127 113 92 78 66 67 41 45 177 244 241 239 236 234 232 232 232 230 226 221 218 204 190 181 173 164 157 168 181 190 195 198 201 207 207 195 185 191 200 177 134 113 119 116 108 108 109 108 109 110 113 115 117 120 124 128 132 133 131 131 133 131 125 123 120 116 117 114 113 109 110 107 56 7 1 4 14 28 41 52 64 70 79 83 84 87 94 105 118 128 132 220 191 166 152 140 130 117 104 91 80 72 52 40 157 241 241 238 238 235 233 229 229 231 229 224 217 204 194 190 181 175 184 194 196 195 194 198 197 206 207 194 190 193 198 172 133 117 126 127 116 111 110 111 115 111 113 116 117 116 118 126 131 131 129 128 129 128 125 121 117 114 115 113 110 108 108 100 39 0 2 11 27 37 44 57 67 74 73 78 87 97 109 121 129 130 126 248 225 184 158 149 136 122 108 95 84 76 59 29 131 236 242 239 237 236 232 230 229 230 230 223 214 209 206 199 190 194 204 212 209 194 195 194 193 198 200 194 194 197 196 165 134 116 125 129 124 114 110 112 117 115 114 117 118 115 116 119 127 129 126 128 125 126 123 119 118 113 113 111 109 106 110 92 28 5 15 27 35 43 53 64 67 72 80 90 102 119 129 130 128 126 125 255 254 238 199 162 146 137 120 104 91 82 71 43 111 225 243 238 237 236 237 232 231 233 231 222 213 210 207 204 211 214 217 220 210 195 191 188 185 192 195 194 199 202 192 163 129 109 116 127 126 114 110 113 114 114 115 117 117 117 119 120 123 127 128 126 125 125 121 119 116 115 114 111 109 106 110 74 13 11 33 46 46 41 39 43 50 60 80 105 117 123 122 121 119 116 114 254 254 255 255 227 178 144 139 129 124 114 104 83 107 210 243 239 238 235 236 234 232 233 230 226 222 216 210 214 223 223 227 221 207 195 190 187 183 190 194 195 202 209 199 161 120 100 105 121 122 117 111 110 111 113 115 114 118 118 118 120 119 122 123 122 123 120 121 118 111 113 113 111 106 108 103 53 8 25 43 45 38 26 12 14 21 41 92 
120 121 120 120 120 119 117 114 254 254 253 254 255 247 199 158 151 143 130 115 84 89 189 241 241 240 239 236 236 236 235 230 227 224 220 221 226 227 229 228 218 204 193 194 186 187 193 196 197 201 215 210 171 125 100 103 114 119 113 110 110 108 111 111 113 118 118 120 119 118 116 118 121 118 119 119 115 114 112 112 109 109 111 92 34 11 40 56 38 21 14 8 13 22 48 106 131 128 129 126 123 123 120 120 254 254 254 254 253 255 251 210 169 140 112 93 78 64 154 235 239 237 237 237 236 236 237 235 233 228 223 225 230 230 229 227 218 204 195 195 192 192 195 204 201 204 218 214 183 141 118 116 120 121 118 110 103 104 106 112 116 119 117 115 114 116 111 117 121 119 119 120 117 117 114 110 104 105 105 66 11 9 34 50 43 27 16 15 27 36 65 116 130 130 127 124 120 122 117 117 254 254 254 254 254 253 255 242 180 134 116 106 97 61 113 219 240 237 237 238 239 238 237 238 237 230 223 227 230 227 226 230 222 211 202 201 203 203 209 220 216 208 204 187 158 137 124 114 112 111 113 114 112 106 104 112 115 116 116 113 114 112 113 113 115 117 120 122 121 115 114 112 109 106 97 49 17 25 32 31 29 25 30 37 37 51 94 131 132 132 132 128 126 120 122 125 254 254 254 254 254 252 252 255 210 139 123 118 106 76 83 190 239 236 238 238 239 238 237 241 240 234 224 229 231 227 225 226 223 216 208 208 217 221 224 224 201 175 158 136 118 114 111 98 93 93 96 106 115 113 111 111 114 114 115 114 110 110 111 110 111 114 119 119 117 114 114 113 108 106 79 36 29 32 33 31 31 31 35 46 48 55 90 123 133 132 131 126 128 126 127 132 254 254 253 253 252 255 255 206 126 139 143 120 97 82 81 161 229 237 237 240 239 238 240 242 242 238 231 229 229 225 225 225 223 220 213 209 221 217 199 176 148 130 122 106 95 101 104 88 83 83 87 92 100 111 117 118 114 114 114 113 113 112 112 113 114 114 116 120 117 115 116 109 108 98 61 34 35 40 39 42 42 42 41 40 38 27 18 39 79 114 128 128 127 129 126 125 252 252 255 255 255 212 137 45 33 99 128 112 96 89 78 138 215 232 235 237 239 240 240 242 243 239 234 230 224 220 227 227 222 212 199 194 
192 174 148 124 114 107 95 84 70 78 88 77 67 73 81 83 88 94 110 120 117 116 116 112 113 113 114 115 117 117 121 119 115 119 115 108 108 73 25 19 29 31 32 32 30 25 18 13 7 1 0 0 5 35 77 118 136 137 133 131 255 255 241 198 124 81 53 42 20 60 118 113 97 80 67 122 196 224 232 235 237 238 242 241 243 241 237 231 219 213 223 224 211 193 170 153 146 133 112 96 84 74 56 39 32 39 59 60 43 37 49 70 79 85 96 106 110 113 113 113 111 113 117 120 116 117 123 121 116 116 111 111 96 38 1 1 3 3 3 2 2 1 0 0 0 1 2 2 0 0 3 32 82 125 138 136 224 164 103 85 84 72 51 21 4 33 99 121 116 70 54 111 193 227 226 234 236 239 241 240 241 240 237 228 211 208 220 219 205 180 149 117 98 88 82 73 49 24 12 10 9 16 27 32 18 3 8 36 55 61 64 76 87 97 102 109 113 114 118 121 118 117 119 120 117 115 112 108 67 9 0 1 1 1 0 1 1 1 2 1 1 1 1 1 1 2 0 0 5 39 89 120 100 81 95 95 76 47 23 8 3 14 64 112 128 69 44 103 188 231 227 230 235 239 239 240 239 238 233 227 217 213 217 214 207 198 179 142 98 91 107 94 55 25 15 9 5 12 18 14 5 1 7 21 39 46 49 57 76 94 100 111 116 116 117 119 122 117 119 118 116 111 108 90 31 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 0 0 12 47 98 101 90 73 49 24 8 2 1 4 29 84 122 82 36 80 181 236 228 228 235 237 238 238 237 234 230 224 216 210 210 204 202 209 217 207 166 149 168 144 93 65 46 34 28 27 25 23 29 37 41 46 55 68 71 80 94 100 105 110 113 116 115 118 121 118 118 120 116 111 102 57 8 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 0 0 99 88 75 55 31 12 3 1 1 2 3 43 91 90 55 67 173 241 233 225 230 232 232 234 234 231 227 221 214 204 202 200 204 211 208 209 206 185 182 161 124 98 75 64 57 53 51 53 64 70 66 67 73 79 86 95 103 107 108 108 115 116 117 118 118 119 118 117 115 111 81 24 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 1 92 79 60 37 16 4 2 1 1 1 0 6 32 56 60 69 162 241 242 227 228 232 234 233 231 229 223 219 214 206 201 199 205 206 197 189 194 191 168 151 149 129 95 83 78 68 70 80 85 85 78 79 85 87 92 94 101 109 107 109 112 114 116 116 116 118 116 113 110 95 47 5 0 1 1 1 1 1 1 1 1 1 1 1 
1 1 1 1 1 1 1 1 1 1 1 1 86 74 50 24 7 2 1 1 1 1 1 1 2 16 44 67 157 238 244 235 226 226 231 232 230 223 219 212 206 201 196 198 203 201 189 178 176 183 172 148 151 143 108 98 96 86 81 92 98 94 89 90 89 92 92 99 107 111 108 108 107 111 113 115 115 113 110 110 102 69 18 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 79 64 39 15 3 1 1 1 1 1 1 1 0 14 46 50 138 237 243 237 226 224 227 229 225 219 215 209 205 199 193 196 196 194 187 177 174 176 176 158 143 134 117 103 101 101 93 98 103 99 96 87 89 98 107 111 112 110 102 102 103 108 111 113 114 112 110 107 93 45 4 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 72 52 27 7 1 1 1 1 1 1 1 1 2 21 46 36 119 234 245 239 232 224 224 224 221 219 215 213 209 200 191 189 190 186 182 176 175 172 166 163 154 137 122 105 99 100 98 100 98 95 90 89 97 106 114 112 110 106 101 99 104 108 107 109 113 111 106 100 79 28 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 64 44 17 3 1 1 1 1 1 1 1 1 2 25 49 25 104 226 243 238 233 227 221 219 220 218 218 215 211 203 198 194 188 185 187 185 182 170 151 143 139 136 125 111 102 103 104 103 100 97 102 108 114 115 110 108 106 104 103 101 105 106 107 109 106 106 100 98 74 18 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 56 32 9 2 1 1 1 1 1 1 1 1 3 28 51 19 87 219 241 236 231 228 223 218 218 220 216 215 210 207 204 199 196 194 195 189 178 160 142 131 120 115 115 110 106 108 106 108 107 110 116 117 118 113 109 107 107 105 104 104 107 108 106 104 103 101 96 95 68 15 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 47 19 4 1 1 1 1 1 1 1 1 1 4 32 51 14 76 215 240 232 230 229 226 218 213 215 216 214 210 211 212 205 202 202 196 186 171 151 129 122 120 113 107 104 104 102 103 107 109 111 112 113 113 112 108 104 105 106 107 108 106 105 102 104 102 96 96 92 59 11 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 33 10 1 1 1 1 1 1 1 1 1 1 6 35 50 12 61 205 237 229 228 228 226 222 212 205 210 212 214 216 215 214 211 210 200 182 163 147 133 123 123 121 114 109 101 101 104 109 110 109 115 114 111 
112 109 106 105 108 109 105 102 100 100 99 94 94 97 93 50 7 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 19 3 1 1 1 1 1 1 1 1 1 1 8 34 44 8 44 193 238 227 225 226 226 222 213 205 201 205 210 215 217 218 214 212 205 190 169 149 138 133 130 131 128 120 120 119 118 122 118 115 118 117 115 115 115 111 109 108 103 100 96 97 96 95 96 97 99 91 44 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 13 2 1 1 1 1 1 1 1 1 1 1 9 34 39 8 28 172 238 228 225 224 223 221 217 210 201 198 201 209 215 217 218 217 212 199 181 160 145 142 140 139 134 131 129 125 123 123 120 116 120 120 123 121 120 118 110 105 103 98 94 90 90 95 98 97 98 87 33 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 5 1 1 1 1 1 1 1 1 1 1 1 10 32 37 8 18 156 237 227 222 220 219 216 212 208 207 201 197 200 209 217 221 220 216 207 192 166 155 153 153 148 140 133 129 129 127 126 124 121 121 122 127 126 122 119 114 110 101 93 89 87 96 101 100 98 96 83 28 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 1 1 1 1 1 1 1 1 1 1 1 10 29 32 10 11 139 234 227 223 219 216 214 209 209 209 208 202 200 206 213 219 220 216 209 195 174 163 164 162 156 145 131 130 132 137 132 130 125 121 125 127 127 122 119 118 112 97 87 87 93 98 103 99 95 97 76 21 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 1 1 1 1 1 1 1 1 1 1 1 10 27 29 10 6 125 231 228 223 219 211 207 209 208 209 210 208 203 202 209 214 218 215 205 191 178 172 168 162 156 148 140 135 136 138 134 132 126 122 124 128 126 122 122 115 101 88 85 91 97 100 100 99 97 95 70 14 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 11 25 24 8 2 118 228 227 220 215 208 203 202 204 206 211 209 203 199 204 210 213 209 199 190 180 172 164 159 154 150 142 138 140 143 140 133 128 127 125 126 124 121 115 106 94 87 88 91 97 101 100 99 96 96 64 11 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 9 22 18 5 0 109 225 226 219 212 208 201 198 200 204 209 210 208 203 199 202 204 197 189 189 181 166 160 154 150 145 139 
135 135 139 138 129 127 125 126 126 125 119 109 97 86 88 93 95 100 96 99 101 96 95 58 6 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 7 20 20 7 0 101 221 229 216 211 206 200 199 196 201 206 209 208 205 200 198 192 179 172 176 170 162 155 149 139 135 131 128 131 131 129 125 122 122 121 122 121 111 99 89 87 90 93 97 97 94 97 101 98 89 43 3 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 9 18 16 8 0 88 215 228 218 213 206 199 193 190 197 202 207 212 206 199 196 189 174 163 159 156 151 146 138 131 127 124 125 127 127 122 116 118 119 117 113 110 100 93 93 90 92 93 99 99 100 100 98 97 87 31 0 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1'
# The 'Image' column stores each picture as one space-separated string of
# 9216 (= 96*96) pixel intensities.  Split the string, parse the values as
# integers and reshape the flat array into a (96, 96) 2D image.
# NOTE: the original used np.fromstring(..., sep=' '), which is deprecated in
# NumPy; np.array over str.split is the supported equivalent and yields the
# same int array.
facialpoints_df['Image'] = facialpoints_df['Image'].apply(lambda x: np.array(x.split(), dtype=int).reshape(96, 96))
# Sanity check: after the conversion above, each 'Image' entry is a (96, 96) array.
facialpoints_df['Image'][1].shape
(96, 96)
# Confirm the dataset is complete: every column should report 0 null values.
facialpoints_df.isnull().sum()
left_eye_center_x 0 left_eye_center_y 0 right_eye_center_x 0 right_eye_center_y 0 left_eye_inner_corner_x 0 left_eye_inner_corner_y 0 left_eye_outer_corner_x 0 left_eye_outer_corner_y 0 right_eye_inner_corner_x 0 right_eye_inner_corner_y 0 right_eye_outer_corner_x 0 right_eye_outer_corner_y 0 left_eyebrow_inner_end_x 0 left_eyebrow_inner_end_y 0 left_eyebrow_outer_end_x 0 left_eyebrow_outer_end_y 0 right_eyebrow_inner_end_x 0 right_eyebrow_inner_end_y 0 right_eyebrow_outer_end_x 0 right_eyebrow_outer_end_y 0 nose_tip_x 0 nose_tip_y 0 mouth_left_corner_x 0 mouth_left_corner_y 0 mouth_right_corner_x 0 mouth_right_corner_y 0 mouth_center_top_lip_x 0 mouth_center_top_lip_y 0 mouth_center_bottom_lip_x 0 mouth_center_bottom_lip_y 0 Image 0 dtype: int64
MINI CHALLENGE #1
# Summary statistics (count/mean/std/quantiles) for the 30 keypoint columns.
facialpoints_df.describe()
| left_eye_center_x | left_eye_center_y | right_eye_center_x | right_eye_center_y | left_eye_inner_corner_x | left_eye_inner_corner_y | left_eye_outer_corner_x | left_eye_outer_corner_y | right_eye_inner_corner_x | right_eye_inner_corner_y | ... | nose_tip_x | nose_tip_y | mouth_left_corner_x | mouth_left_corner_y | mouth_right_corner_x | mouth_right_corner_y | mouth_center_top_lip_x | mouth_center_top_lip_y | mouth_center_bottom_lip_x | mouth_center_bottom_lip_y | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | ... | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 | 2140.000000 |
| mean | 66.221549 | 36.842274 | 29.640269 | 37.063815 | 59.272128 | 37.856014 | 73.412473 | 37.640110 | 36.603107 | 37.920852 | ... | 47.952141 | 57.253926 | 63.419076 | 75.887660 | 32.967365 | 76.134065 | 48.081325 | 72.681125 | 48.149654 | 82.630412 |
| std | 2.087683 | 2.294027 | 2.051575 | 2.234334 | 2.005631 | 2.034500 | 2.701639 | 2.684162 | 1.822784 | 2.009505 | ... | 3.276053 | 4.528635 | 3.650131 | 4.438565 | 3.595103 | 4.259514 | 2.723274 | 5.108675 | 3.032389 | 4.813557 |
| min | 47.835757 | 23.832996 | 18.922611 | 24.773072 | 41.779381 | 27.190098 | 52.947144 | 26.250023 | 24.112624 | 26.250023 | ... | 24.472590 | 41.558400 | 43.869480 | 57.023258 | 9.778137 | 56.690208 | 32.260312 | 56.719043 | 33.047605 | 57.232296 |
| 25% | 65.046300 | 35.468842 | 28.472224 | 35.818377 | 58.113054 | 36.607950 | 71.741978 | 36.102409 | 35.495730 | 36.766783 | ... | 46.495330 | 54.466000 | 61.341291 | 72.874263 | 30.879288 | 73.280038 | 46.580004 | 69.271669 | 46.492000 | 79.417480 |
| 50% | 66.129065 | 36.913319 | 29.655440 | 37.048085 | 59.327154 | 37.845220 | 73.240045 | 37.624207 | 36.620735 | 37.920336 | ... | 47.900511 | 57.638582 | 63.199057 | 75.682465 | 33.034022 | 75.941985 | 47.939031 | 72.395978 | 47.980854 | 82.388899 |
| 75% | 67.332093 | 38.286438 | 30.858673 | 38.333884 | 60.521492 | 39.195431 | 74.978684 | 39.308331 | 37.665280 | 39.143921 | ... | 49.260657 | 60.303524 | 65.302398 | 78.774969 | 35.063575 | 78.884031 | 49.290000 | 75.840286 | 49.551936 | 85.697976 |
| max | 78.013082 | 46.132421 | 42.495172 | 45.980981 | 69.023030 | 47.190316 | 87.032252 | 49.653825 | 47.293746 | 44.887301 | ... | 65.279654 | 75.992731 | 84.767123 | 94.673637 | 50.973348 | 93.443176 | 61.804506 | 93.916338 | 62.438095 | 95.808983 |
8 rows × 30 columns
# Plot a random image from the dataset along with facial keypoints.
# np.random.randint's upper bound is exclusive; starting the range at 0 (the
# original started at 1) lets the first sample be selected as well.
i = np.random.randint(0, len(facialpoints_df))
plt.imshow(facialpoints_df['Image'][i],cmap='gray')
<matplotlib.image.AxesImage at 0x1c029bd9280>
# Overlay the 15 (x, y) keypoints on the image selected above.
# The first 30 columns alternate x, y: even positions (0, 2, ...) hold the
# x-coordinates and the following odd position holds the matching
# y-coordinate, so we walk the columns two at a time via .loc.
plt.figure()
plt.imshow(facialpoints_df['Image'][i], cmap='gray')
for x_col in range(0, 30, 2):
    plt.plot(facialpoints_df.loc[i][x_col], facialpoints_df.loc[i][x_col + 1], 'rx')
import random
# Display the first 16 samples in a 4x4 grid, each with its keypoints overlaid.
fig = plt.figure(figsize=(20, 20))
for idx in range(16):
    fig.add_subplot(4, 4, idx + 1)
    plt.imshow(facialpoints_df['Image'][idx], cmap='gray')
    for x_col in range(0, 30, 2):
        plt.plot(facialpoints_df.loc[idx][x_col], facialpoints_df.loc[idx][x_col + 1], 'rx')
MINI CHALLENGE #2
import random
# Show 64 randomly chosen samples in an 8x8 grid with their keypoints.
fig = plt.figure(figsize=(20, 20))
for cell in range(64):
    fig.add_subplot(8, 8, cell + 1)
    img = np.random.randint(1, len(facialpoints_df))
    plt.imshow(facialpoints_df['Image'][img], cmap='gray')
    for x_col in range(0, 30, 2):
        plt.plot(facialpoints_df.loc[img][x_col], facialpoints_df.loc[img][x_col + 1], 'rx')
# Work on a copy so the original dataframe stays untouched during augmentation.
import copy
facialpoints_df_copy = copy.copy(facialpoints_df)
# Keep every column name except 'Image' (the last one): the 30 keypoint columns.
columns = facialpoints_df_copy.columns.drop('Image')
columns
Index(['left_eye_center_x', 'left_eye_center_y', 'right_eye_center_x',
'right_eye_center_y', 'left_eye_inner_corner_x',
'left_eye_inner_corner_y', 'left_eye_outer_corner_x',
'left_eye_outer_corner_y', 'right_eye_inner_corner_x',
'right_eye_inner_corner_y', 'right_eye_outer_corner_x',
'right_eye_outer_corner_y', 'left_eyebrow_inner_end_x',
'left_eyebrow_inner_end_y', 'left_eyebrow_outer_end_x',
'left_eyebrow_outer_end_y', 'right_eyebrow_inner_end_x',
'right_eyebrow_inner_end_y', 'right_eyebrow_outer_end_x',
'right_eyebrow_outer_end_y', 'nose_tip_x', 'nose_tip_y',
'mouth_left_corner_x', 'mouth_left_corner_y', 'mouth_right_corner_x',
'mouth_right_corner_y', 'mouth_center_top_lip_x',
'mouth_center_top_lip_y', 'mouth_center_bottom_lip_x',
'mouth_center_bottom_lip_y'],
dtype='object')
# Take a look at the pixel values of a sample image and see if it makes sense!
# Values are raw 8-bit intensities (0-255) in a 96x96 int array.
facialpoints_df['Image'][0]
array([[238, 236, 237, ..., 250, 250, 250],
[235, 238, 236, ..., 249, 250, 251],
[237, 236, 237, ..., 251, 251, 250],
...,
[186, 183, 181, ..., 52, 57, 60],
[189, 188, 207, ..., 61, 69, 78],
[191, 184, 184, ..., 70, 75, 90]])
# Render the same sample as a grayscale image.
plt.imshow(facialpoints_df['Image'][0], cmap = 'gray')
<matplotlib.image.AxesImage at 0x1c02a5a6ee0>
# Mirror every image left<->right (a flip along axis=1, the column axis).
facialpoints_df_copy['Image'] = facialpoints_df_copy['Image'].apply(np.fliplr)
# Now take a look at the flipped image and do a sanity check!
# Each row of pixel values should now read right-to-left vs. the original.
facialpoints_df_copy['Image'][0]
array([[250, 250, 250, ..., 237, 236, 238],
[251, 250, 249, ..., 236, 238, 235],
[250, 251, 251, ..., 237, 236, 237],
...,
[ 60, 57, 52, ..., 181, 183, 186],
[ 78, 69, 61, ..., 207, 188, 189],
[ 90, 75, 70, ..., 184, 184, 191]])
# Render the mirrored copy — the face should appear flipped left<->right.
plt.imshow(facialpoints_df_copy['Image'][0], cmap = 'gray')
<matplotlib.image.AxesImage at 0x1c02a63acd0>
# A horizontal flip leaves y-coordinates unchanged; every x-coordinate is
# mirrored about the image width: new_x = 96 - old_x.
# The x-coordinate columns sit at the even positions of `columns`.
for x_col in columns[::2]:
    facialpoints_df_copy[x_col] = facialpoints_df_copy[x_col].apply(lambda v: 96. - float(v))
# Show the untouched original image with its keypoints, for comparison.
plt.imshow(facialpoints_df['Image'][0], cmap='gray')
for x_col in range(0, 30, 2):
    plt.plot(facialpoints_df.loc[0][x_col], facialpoints_df.loc[0][x_col + 1], 'rx')
# Show the horizontally flipped image; the mirrored keypoints should still
# land on the correct facial features.
plt.imshow(facialpoints_df_copy['Image'][0], cmap='gray')
for x_col in range(0, 30, 2):
    plt.plot(facialpoints_df_copy.loc[0][x_col], facialpoints_df_copy.loc[0][x_col + 1], 'rx')
# Stack the original and the flipped dataframes row-wise, doubling the data.
# NOTE: np.concatenate converts the result to a plain (4280, 31) object
# ndarray (not a DataFrame); later cells rely on that and index it positionally.
facialpoints_df_augmented = np.concatenate((facialpoints_df,facialpoints_df_copy))
facialpoints_df_augmented.shape
(4280, 31)
# Second augmentation: brighten each image by a random factor drawn from
# [1, 2] (a fresh factor per image), clipping the result back into the valid
# pixel range [0, 255].  Keypoint coordinates are unaffected by brightness.
import random
facialpoints_df_copy = copy.copy(facialpoints_df)
facialpoints_df_copy['Image'] = facialpoints_df['Image'].apply(
    lambda image: np.clip(random.uniform(1, 2) * image, 0.0, 255.0))
facialpoints_df_augmented = np.concatenate((facialpoints_df_augmented, facialpoints_df_copy))
facialpoints_df_augmented.shape
(6420, 31)
# Preview the brightness-boosted version of sample 0 with its keypoints.
plt.imshow(facialpoints_df_copy['Image'][0], cmap='gray')
for x_col in range(0, 30, 2):
    plt.plot(facialpoints_df_copy.loc[0][x_col], facialpoints_df_copy.loc[0][x_col + 1], 'rx')
MINI CHALLENGE #3
# Randomly decrease image brightness.
# NOTE: despite the challenge wording ("between 0 and 1"), the code draws the
# factor from uniform(0, 0.2) — a strong darkening.  Result is clipped to
# [0, 255], though clipping is a no-op when the factor is < 1.
facialpoints_df_copy = copy.copy(facialpoints_df)
facialpoints_df_copy['Image'] = facialpoints_df['Image'].apply(lambda x:np.clip(random.uniform(0, 0.2) * x, 0.0, 255.0))
# facialpoints_df_augmented = np.concatenate((facialpoints_df_augmented, facialpoints_df_copy))
# facialpoints_df_augmented.shape
# The source dataframe is unchanged — the augmentation wrote only to the copy.
facialpoints_df['Image'][0]
array([[238, 236, 237, ..., 250, 250, 250],
[235, 238, 236, ..., 249, 250, 251],
[237, 236, 237, ..., 251, 251, 250],
...,
[186, 183, 181, ..., 52, 57, 60],
[189, 188, 207, ..., 61, 69, 78],
[191, 184, 184, ..., 70, 75, 90]])
# Every pixel in the copy is now scaled down by the same random factor.
facialpoints_df_copy['Image'][0]
array([[30.97241284, 30.71214046, 30.84227665, ..., 32.5340471 ,
32.5340471 , 32.5340471 ],
[30.58200427, 30.97241284, 30.71214046, ..., 32.40391091,
32.5340471 , 32.66418329],
[30.84227665, 30.71214046, 30.84227665, ..., 32.66418329,
32.66418329, 32.5340471 ],
...,
[24.20533104, 23.81492248, 23.5546501 , ..., 6.7670818 ,
7.41776274, 7.8081713 ],
[24.59573961, 24.46560342, 26.938191 , ..., 7.93830749,
8.979397 , 10.1506227 ],
[24.85601198, 23.94505867, 23.94505867, ..., 9.10953319,
9.76021413, 11.71225696]])
# Preview the darkened version of sample 0 with its keypoints.
plt.imshow(facialpoints_df_copy['Image'][0], cmap='gray')
for x_col in range(0, 30, 2):
    plt.plot(facialpoints_df_copy.loc[0][x_col], facialpoints_df_copy.loc[0][x_col + 1], 'rx')
MINI CHALLENGE #4
# Start from a fresh copy, then mirror every image top<->bottom
# (a flip along axis=0, the row axis).
facialpoints_df_copy = copy.copy(facialpoints_df)
facialpoints_df_copy['Image'] = facialpoints_df_copy['Image'].apply(np.flipud)
# Original image, for comparison with the vertically flipped copy below.
facialpoints_df['Image'][0]
array([[238, 236, 237, ..., 250, 250, 250],
[235, 238, 236, ..., 249, 250, 251],
[237, 236, 237, ..., 251, 251, 250],
...,
[186, 183, 181, ..., 52, 57, 60],
[189, 188, 207, ..., 61, 69, 78],
[191, 184, 184, ..., 70, 75, 90]])
# Row order is reversed relative to the original: the image is upside down.
facialpoints_df_copy['Image'][0]
array([[191, 184, 184, ..., 70, 75, 90],
[189, 188, 207, ..., 61, 69, 78],
[186, 183, 181, ..., 52, 57, 60],
...,
[237, 236, 237, ..., 251, 251, 250],
[235, 238, 236, ..., 249, 250, 251],
[238, 236, 237, ..., 250, 250, 250]])
# A vertical flip leaves x-coordinates unchanged; every y-coordinate is
# mirrored about the image height: new_y = 96 - old_y.
# The y-coordinate columns sit at the odd positions of `columns`.
for y_col in columns[1::2]:
    facialpoints_df_copy[y_col] = facialpoints_df_copy[y_col].apply(lambda v: 96. - float(v))
# View the vertically flipped image with its mirrored keypoints.
plt.imshow(facialpoints_df_copy['Image'][0], cmap='gray')
for x_col in range(0, 30, 2):
    plt.plot(facialpoints_df_copy.loc[0][x_col], facialpoints_df_copy.loc[0][x_col + 1], 'rx')
# Extract the image column (index 30, i.e. the 31st column) from the augmented
# object array and scale pixel values into [0, 1].
img = facialpoints_df_augmented[:, 30]
img = img/255.
# Stack the (96, 96) images into a single (N, 96, 96) array, then append a
# trailing channel axis so the tensor is (N, 96, 96, 1) as the conv net
# expects.  This replaces the original element-by-element fill of a
# pre-allocated np.empty array with one vectorized call — same values, same
# float32 dtype.
X = np.stack(img)[..., np.newaxis].astype(np.float32)
X.shape
(6420, 96, 96, 1)
# The first 30 columns of the augmented array are the keypoint coordinates —
# these become the regression targets, converted to float32 for training.
y = facialpoints_df_augmented[:, :30].astype(np.float32)
y.shape
(6420, 30)
# Hold out 10% of the samples for testing.
# NOTE(review): no random_state is passed, so the split differs on every run —
# pass one if reproducibility matters.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.1)
MINI CHALLENGE #5
# 90% of the 6420 augmented samples = 5778 training examples.
X_train.shape
(5778, 96, 96, 1)
# Sanity-check the training tensors: draw the first 64 training images in an
# 8x8 grid and overlay their keypoint targets.
fig = plt.figure(figsize=(20, 20))
for cell in range(64):
    fig.add_subplot(8, 8, cell + 1)
    plt.imshow(X_train[cell].reshape(96, 96), cmap='gray')
    for x_col in range(0, 30, 2):
        plt.plot(y_train[cell][x_col], y_train[cell][x_col + 1], 'rx')
def _res_identity_block(X, filters, stage, block_id):
    """ResNet identity block: a 1x1 -> 3x3 -> 1x1 convolution stack
    (batch-normalized, ReLU between convs) whose output is added back onto
    its own unmodified input, followed by a final ReLU.

    Spatial size and channel count are unchanged, so the input can be added
    without a projection.  Layer names reproduce the original hand-unrolled
    scheme ('res_<stage>_identity_<block_id>_a/b/c', 'bn_...') exactly.
    """
    f1, f2, f3 = filters
    prefix = 'res_' + str(stage) + '_identity_' + str(block_id) + '_'
    bn_prefix = 'bn_' + str(stage) + '_identity_' + str(block_id) + '_'
    X_copy = X
    # Main path
    X = Conv2D(f1, (1, 1), strides=(1, 1), name=prefix + 'a',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_prefix + 'a')(X)
    X = Activation('relu')(X)
    X = Conv2D(f2, kernel_size=(3, 3), strides=(1, 1), padding='same',
               name=prefix + 'b', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_prefix + 'b')(X)
    X = Activation('relu')(X)
    X = Conv2D(f3, kernel_size=(1, 1), strides=(1, 1), name=prefix + 'c',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_prefix + 'c')(X)
    # Add both paths together (the input is fed in as-is, hence "identity")
    X = Add()([X, X_copy])
    return Activation('relu')(X)

def res_block(X, filter, stage):
    """One ResNet stage: a convolutional block (which halves the spatial size
    via MaxPool on both the main and shortcut paths and projects the channel
    count to f3) followed by two identity blocks.

    Args:
        X: input 4D tensor (batch, height, width, channels).
        filter: [f1, f2, f3] channel counts for the 1x1 / 3x3 / 1x1 convs.
        stage: integer used only to build unique layer names.

    Returns:
        Output tensor with half the spatial size and f3 channels.
    """
    # CONVOLUTIONAL BLOCK
    X_copy = X
    f1, f2, f3 = filter
    # Main path: 1x1 conv, 2x2 downsample, then 3x3 and 1x1 convs
    X = Conv2D(f1, (1, 1), strides=(1, 1), name='res_' + str(stage) + '_conv_a',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = MaxPool2D((2, 2))(X)
    X = BatchNormalization(axis=3, name='bn_' + str(stage) + '_conv_a')(X)
    X = Activation('relu')(X)
    X = Conv2D(f2, kernel_size=(3, 3), strides=(1, 1), padding='same',
               name='res_' + str(stage) + '_conv_b',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name='bn_' + str(stage) + '_conv_b')(X)
    X = Activation('relu')(X)
    X = Conv2D(f3, kernel_size=(1, 1), strides=(1, 1),
               name='res_' + str(stage) + '_conv_c',
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name='bn_' + str(stage) + '_conv_c')(X)
    # Shortcut path: 1x1 projection + matching downsample so shapes align
    X_copy = Conv2D(f3, kernel_size=(1, 1), strides=(1, 1),
                    name='res_' + str(stage) + '_conv_copy',
                    kernel_initializer=glorot_uniform(seed=0))(X_copy)
    X_copy = MaxPool2D((2, 2))(X_copy)
    X_copy = BatchNormalization(axis=3, name='bn_' + str(stage) + '_conv_copy')(X_copy)
    # Merge main and shortcut paths
    X = Add()([X, X_copy])
    X = Activation('relu')(X)
    # The original hand-unrolled two identical identity blocks; they are
    # factored into a shared helper (layer names and order are unchanged).
    X = _res_identity_block(X, filter, stage, 1)
    X = _res_identity_block(X, filter, stage, 2)
    return X
# Grayscale 96x96 input with a single channel.
input_shape = (96,96,1)
# Input tensor shape
X_input = Input(input_shape)
# Zero-padding: 96x96 -> 102x102 before the stride-2 stem conv
X = ZeroPadding2D((3,3))(X_input)
# Stage #1: standard ResNet stem (7x7/2 conv -> BN -> ReLU -> 3x3/2 maxpool)
X = Conv2D(64, (7,7), strides= (2,2), name = 'conv1', kernel_initializer= glorot_uniform(seed = 0))(X)
X = BatchNormalization(axis =3, name = 'bn_conv1')(X)
X = Activation('relu')(X)
X = MaxPooling2D((3,3), strides= (2,2))(X)
# Stage #2: conv block + 2 identity blocks, 256 output channels
X = res_block(X, filter= [64,64,256], stage= 2)
# Stage #3: conv block + 2 identity blocks, 512 output channels
X = res_block(X, filter= [128,128,512], stage= 3)
# Average Pooling (note: layer name 'Averagea_Pooling' contains a typo, kept
# for checkpoint compatibility)
X = AveragePooling2D((2,2), name = 'Averagea_Pooling')(X)
# Regression head: two wide ReLU layers with dropout, then 30 outputs —
# one per keypoint coordinate (15 points x 2).
X = Flatten()(X)
X = Dense(4096, activation = 'relu')(X)
X = Dropout(0.2)(X)
X = Dense(2048, activation = 'relu')(X)
X = Dropout(0.1)(X)
# NOTE(review): 'relu' on the output clamps predictions to >= 0; coordinates
# are non-negative here, but a linear activation is the usual choice — confirm.
X = Dense(30, activation = 'relu')(X)
model = Model( inputs= X_input, outputs = X)
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 96, 96, 1)] 0 []
zero_padding2d (ZeroPaddin (None, 102, 102, 1) 0 ['input_1[0][0]']
g2D)
conv1 (Conv2D) (None, 48, 48, 64) 3200 ['zero_padding2d[0][0]']
bn_conv1 (BatchNormalizati (None, 48, 48, 64) 256 ['conv1[0][0]']
on)
activation (Activation) (None, 48, 48, 64) 0 ['bn_conv1[0][0]']
max_pooling2d (MaxPooling2 (None, 23, 23, 64) 0 ['activation[0][0]']
D)
res_2_conv_a (Conv2D) (None, 23, 23, 64) 4160 ['max_pooling2d[0][0]']
max_pooling2d_1 (MaxPoolin (None, 11, 11, 64) 0 ['res_2_conv_a[0][0]']
g2D)
bn_2_conv_a (BatchNormaliz (None, 11, 11, 64) 256 ['max_pooling2d_1[0][0]']
ation)
activation_1 (Activation) (None, 11, 11, 64) 0 ['bn_2_conv_a[0][0]']
res_2_conv_b (Conv2D) (None, 11, 11, 64) 36928 ['activation_1[0][0]']
bn_2_conv_b (BatchNormaliz (None, 11, 11, 64) 256 ['res_2_conv_b[0][0]']
ation)
activation_2 (Activation) (None, 11, 11, 64) 0 ['bn_2_conv_b[0][0]']
res_2_conv_copy (Conv2D) (None, 23, 23, 256) 16640 ['max_pooling2d[0][0]']
res_2_conv_c (Conv2D) (None, 11, 11, 256) 16640 ['activation_2[0][0]']
max_pooling2d_2 (MaxPoolin (None, 11, 11, 256) 0 ['res_2_conv_copy[0][0]']
g2D)
bn_2_conv_c (BatchNormaliz (None, 11, 11, 256) 1024 ['res_2_conv_c[0][0]']
ation)
bn_2_conv_copy (BatchNorma (None, 11, 11, 256) 1024 ['max_pooling2d_2[0][0]']
lization)
add (Add) (None, 11, 11, 256) 0 ['bn_2_conv_c[0][0]',
'bn_2_conv_copy[0][0]']
activation_3 (Activation) (None, 11, 11, 256) 0 ['add[0][0]']
res_2_identity_1_a (Conv2D (None, 11, 11, 64) 16448 ['activation_3[0][0]']
)
bn_2_identity_1_a (BatchNo (None, 11, 11, 64) 256 ['res_2_identity_1_a[0][0]']
rmalization)
activation_4 (Activation) (None, 11, 11, 64) 0 ['bn_2_identity_1_a[0][0]']
res_2_identity_1_b (Conv2D (None, 11, 11, 64) 36928 ['activation_4[0][0]']
)
bn_2_identity_1_b (BatchNo (None, 11, 11, 64) 256 ['res_2_identity_1_b[0][0]']
rmalization)
activation_5 (Activation) (None, 11, 11, 64) 0 ['bn_2_identity_1_b[0][0]']
res_2_identity_1_c (Conv2D (None, 11, 11, 256) 16640 ['activation_5[0][0]']
)
bn_2_identity_1_c (BatchNo (None, 11, 11, 256) 1024 ['res_2_identity_1_c[0][0]']
rmalization)
add_1 (Add) (None, 11, 11, 256) 0 ['bn_2_identity_1_c[0][0]',
'activation_3[0][0]']
activation_6 (Activation) (None, 11, 11, 256) 0 ['add_1[0][0]']
res_2_identity_2_a (Conv2D (None, 11, 11, 64) 16448 ['activation_6[0][0]']
)
bn_2_identity_2_a (BatchNo (None, 11, 11, 64) 256 ['res_2_identity_2_a[0][0]']
rmalization)
activation_7 (Activation) (None, 11, 11, 64) 0 ['bn_2_identity_2_a[0][0]']
res_2_identity_2_b (Conv2D (None, 11, 11, 64) 36928 ['activation_7[0][0]']
)
bn_2_identity_2_b (BatchNo (None, 11, 11, 64) 256 ['res_2_identity_2_b[0][0]']
rmalization)
activation_8 (Activation) (None, 11, 11, 64) 0 ['bn_2_identity_2_b[0][0]']
res_2_identity_2_c (Conv2D (None, 11, 11, 256) 16640 ['activation_8[0][0]']
)
bn_2_identity_2_c (BatchNo (None, 11, 11, 256) 1024 ['res_2_identity_2_c[0][0]']
rmalization)
add_2 (Add) (None, 11, 11, 256) 0 ['bn_2_identity_2_c[0][0]',
'activation_6[0][0]']
activation_9 (Activation) (None, 11, 11, 256) 0 ['add_2[0][0]']
res_3_conv_a (Conv2D) (None, 11, 11, 128) 32896 ['activation_9[0][0]']
max_pooling2d_3 (MaxPoolin (None, 5, 5, 128) 0 ['res_3_conv_a[0][0]']
g2D)
bn_3_conv_a (BatchNormaliz (None, 5, 5, 128) 512 ['max_pooling2d_3[0][0]']
ation)
activation_10 (Activation) (None, 5, 5, 128) 0 ['bn_3_conv_a[0][0]']
res_3_conv_b (Conv2D) (None, 5, 5, 128) 147584 ['activation_10[0][0]']
bn_3_conv_b (BatchNormaliz (None, 5, 5, 128) 512 ['res_3_conv_b[0][0]']
ation)
activation_11 (Activation) (None, 5, 5, 128) 0 ['bn_3_conv_b[0][0]']
res_3_conv_copy (Conv2D) (None, 11, 11, 512) 131584 ['activation_9[0][0]']
res_3_conv_c (Conv2D) (None, 5, 5, 512) 66048 ['activation_11[0][0]']
max_pooling2d_4 (MaxPoolin (None, 5, 5, 512) 0 ['res_3_conv_copy[0][0]']
g2D)
bn_3_conv_c (BatchNormaliz (None, 5, 5, 512) 2048 ['res_3_conv_c[0][0]']
ation)
bn_3_conv_copy (BatchNorma (None, 5, 5, 512) 2048 ['max_pooling2d_4[0][0]']
lization)
add_3 (Add) (None, 5, 5, 512) 0 ['bn_3_conv_c[0][0]',
'bn_3_conv_copy[0][0]']
activation_12 (Activation) (None, 5, 5, 512) 0 ['add_3[0][0]']
res_3_identity_1_a (Conv2D (None, 5, 5, 128) 65664 ['activation_12[0][0]']
)
bn_3_identity_1_a (BatchNo (None, 5, 5, 128) 512 ['res_3_identity_1_a[0][0]']
rmalization)
activation_13 (Activation) (None, 5, 5, 128) 0 ['bn_3_identity_1_a[0][0]']
res_3_identity_1_b (Conv2D (None, 5, 5, 128) 147584 ['activation_13[0][0]']
)
bn_3_identity_1_b (BatchNo (None, 5, 5, 128) 512 ['res_3_identity_1_b[0][0]']
rmalization)
activation_14 (Activation) (None, 5, 5, 128) 0 ['bn_3_identity_1_b[0][0]']
res_3_identity_1_c (Conv2D (None, 5, 5, 512) 66048 ['activation_14[0][0]']
)
bn_3_identity_1_c (BatchNo (None, 5, 5, 512) 2048 ['res_3_identity_1_c[0][0]']
rmalization)
add_4 (Add) (None, 5, 5, 512) 0 ['bn_3_identity_1_c[0][0]',
'activation_12[0][0]']
activation_15 (Activation) (None, 5, 5, 512) 0 ['add_4[0][0]']
res_3_identity_2_a (Conv2D (None, 5, 5, 128) 65664 ['activation_15[0][0]']
)
bn_3_identity_2_a (BatchNo (None, 5, 5, 128) 512 ['res_3_identity_2_a[0][0]']
rmalization)
activation_16 (Activation) (None, 5, 5, 128) 0 ['bn_3_identity_2_a[0][0]']
res_3_identity_2_b (Conv2D (None, 5, 5, 128) 147584 ['activation_16[0][0]']
)
bn_3_identity_2_b (BatchNo (None, 5, 5, 128) 512 ['res_3_identity_2_b[0][0]']
rmalization)
activation_17 (Activation) (None, 5, 5, 128) 0 ['bn_3_identity_2_b[0][0]']
res_3_identity_2_c (Conv2D (None, 5, 5, 512) 66048 ['activation_17[0][0]']
)
bn_3_identity_2_c (BatchNo (None, 5, 5, 512) 2048 ['res_3_identity_2_c[0][0]']
rmalization)
add_5 (Add) (None, 5, 5, 512) 0 ['bn_3_identity_2_c[0][0]',
'activation_15[0][0]']
activation_18 (Activation) (None, 5, 5, 512) 0 ['add_5[0][0]']
Averagea_Pooling (AverageP (None, 2, 2, 512) 0 ['activation_18[0][0]']
ooling2D)
flatten (Flatten) (None, 2048) 0 ['Averagea_Pooling[0][0]']
dense (Dense) (None, 4096) 8392704 ['flatten[0][0]']
dropout (Dropout) (None, 4096) 0 ['dense[0][0]']
dense_1 (Dense) (None, 2048) 8390656 ['dropout[0][0]']
dropout_1 (Dropout) (None, 2048) 0 ['dense_1[0][0]']
dense_2 (Dense) (None, 30) 61470 ['dropout_1[0][0]']
==================================================================================================
Total params: 18016286 (68.73 MB)
Trainable params: 18007710 (68.69 MB)
Non-trainable params: 8576 (33.50 KB)
__________________________________________________________________________________________________
MINI CHALLENGE:
# Adam optimizer; `learning_rate` replaces the deprecated `lr` argument
# (the original call triggered the Keras deprecation warning seen below).
adam = tf.keras.optimizers.Adam(learning_rate = 0.001, beta_1=0.9, beta_2=0.999, amsgrad=False)
# Mean squared error is the natural loss for coordinate regression.
# NOTE(review): 'accuracy' is not meaningful for a regression output — the
# logged val_accuracy is noise; consider 'mae' instead.
model.compile(loss="mean_squared_error", optimizer = adam, metrics = ['accuracy'])
WARNING:absl:`lr` is deprecated in Keras optimizer, please use `learning_rate` or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
# Save the weights of the model with the lowest validation loss seen so far.
# NOTE(review): the .hdf5 format is legacy in recent Keras (see the warning
# printed below); the native '.keras' format is recommended, but later cells
# may load "weights.hdf5" — confirm before renaming the file.
checkpointer = ModelCheckpoint(filepath = "weights.hdf5", verbose = 1, save_best_only = True)
# Train for up to 100 epochs, holding out 5% of the training set for validation.
history = model.fit(X_train, y_train, batch_size = 256, epochs= 100, validation_split = 0.05, callbacks=[checkpointer])
Epoch 1/100 22/22 [==============================] - ETA: 0s - loss: 368.3590 - accuracy: 0.3755 Epoch 1: val_loss improved from inf to 2047.11682, saving model to weights.hdf5
C:\Users\SOMNATH\anaconda3\lib\site-packages\keras\src\engine\training.py:3000: UserWarning: You are saving your model as an HDF5 file via `model.save()`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')`.
saving_api.save_model(
22/22 [==============================] - 192s 7s/step - loss: 368.3590 - accuracy: 0.3755 - val_loss: 2047.1168 - val_accuracy: 0.6817 Epoch 2/100 22/22 [==============================] - ETA: 0s - loss: 131.9641 - accuracy: 0.5857 Epoch 2: val_loss improved from 2047.11682 to 1557.73181, saving model to weights.hdf5 22/22 [==============================] - 177s 8s/step - loss: 131.9641 - accuracy: 0.5857 - val_loss: 1557.7318 - val_accuracy: 0.6817 Epoch 3/100 22/22 [==============================] - ETA: 0s - loss: 90.9153 - accuracy: 0.6333 Epoch 3: val_loss improved from 1557.73181 to 1332.66333, saving model to weights.hdf5 22/22 [==============================] - 151s 7s/step - loss: 90.9153 - accuracy: 0.6333 - val_loss: 1332.6633 - val_accuracy: 0.6817 Epoch 4/100 22/22 [==============================] - ETA: 0s - loss: 67.4141 - accuracy: 0.6130 Epoch 4: val_loss improved from 1332.66333 to 1282.34607, saving model to weights.hdf5 22/22 [==============================] - 164s 7s/step - loss: 67.4141 - accuracy: 0.6130 - val_loss: 1282.3461 - val_accuracy: 0.6817 Epoch 5/100 22/22 [==============================] - ETA: 0s - loss: 52.2441 - accuracy: 0.5881 Epoch 5: val_loss improved from 1282.34607 to 1028.10339, saving model to weights.hdf5 22/22 [==============================] - 224s 10s/step - loss: 52.2441 - accuracy: 0.5881 - val_loss: 1028.1034 - val_accuracy: 0.6817 Epoch 6/100 22/22 [==============================] - ETA: 0s - loss: 39.3052 - accuracy: 0.6036 Epoch 6: val_loss improved from 1028.10339 to 841.44067, saving model to weights.hdf5 22/22 [==============================] - 213s 10s/step - loss: 39.3052 - accuracy: 0.6036 - val_loss: 841.4407 - val_accuracy: 0.6817 Epoch 7/100 22/22 [==============================] - ETA: 0s - loss: 30.7309 - accuracy: 0.6058 Epoch 7: val_loss improved from 841.44067 to 746.39240, saving model to weights.hdf5 22/22 [==============================] - 213s 10s/step - loss: 30.7309 - accuracy: 0.6058 - 
val_loss: 746.3924 - val_accuracy: 0.6817 Epoch 8/100 22/22 [==============================] - ETA: 0s - loss: 31.4347 - accuracy: 0.6234 Epoch 8: val_loss improved from 746.39240 to 590.82214, saving model to weights.hdf5 22/22 [==============================] - 206s 9s/step - loss: 31.4347 - accuracy: 0.6234 - val_loss: 590.8221 - val_accuracy: 0.6817 Epoch 9/100 22/22 [==============================] - ETA: 0s - loss: 30.8841 - accuracy: 0.5884 Epoch 9: val_loss improved from 590.82214 to 546.01263, saving model to weights.hdf5 22/22 [==============================] - 204s 9s/step - loss: 30.8841 - accuracy: 0.5884 - val_loss: 546.0126 - val_accuracy: 0.6817 Epoch 10/100 22/22 [==============================] - ETA: 0s - loss: 29.4665 - accuracy: 0.6280 Epoch 10: val_loss improved from 546.01263 to 540.99323, saving model to weights.hdf5 22/22 [==============================] - 204s 9s/step - loss: 29.4665 - accuracy: 0.6280 - val_loss: 540.9932 - val_accuracy: 0.6817 Epoch 11/100 22/22 [==============================] - ETA: 0s - loss: 24.4492 - accuracy: 0.6212 Epoch 11: val_loss improved from 540.99323 to 396.03537, saving model to weights.hdf5 22/22 [==============================] - 207s 9s/step - loss: 24.4492 - accuracy: 0.6212 - val_loss: 396.0354 - val_accuracy: 0.6817 Epoch 12/100 22/22 [==============================] - ETA: 0s - loss: 22.2905 - accuracy: 0.6198 Epoch 12: val_loss did not improve from 396.03537 22/22 [==============================] - 204s 9s/step - loss: 22.2905 - accuracy: 0.6198 - val_loss: 411.1057 - val_accuracy: 0.6817 Epoch 13/100 22/22 [==============================] - ETA: 0s - loss: 22.0869 - accuracy: 0.6163 Epoch 13: val_loss did not improve from 396.03537 22/22 [==============================] - 206s 9s/step - loss: 22.0869 - accuracy: 0.6163 - val_loss: 398.4462 - val_accuracy: 0.6817 Epoch 14/100 22/22 [==============================] - ETA: 0s - loss: 19.6602 - accuracy: 0.6345 Epoch 14: val_loss improved from 
396.03537 to 324.96036, saving model to weights.hdf5 22/22 [==============================] - 215s 10s/step - loss: 19.6602 - accuracy: 0.6345 - val_loss: 324.9604 - val_accuracy: 0.6817 Epoch 15/100 22/22 [==============================] - ETA: 0s - loss: 22.8431 - accuracy: 0.6500 Epoch 15: val_loss did not improve from 324.96036 22/22 [==============================] - 201s 9s/step - loss: 22.8431 - accuracy: 0.6500 - val_loss: 361.3255 - val_accuracy: 0.6298 Epoch 16/100 22/22 [==============================] - ETA: 0s - loss: 21.2826 - accuracy: 0.6234 Epoch 16: val_loss improved from 324.96036 to 263.49847, saving model to weights.hdf5 22/22 [==============================] - 208s 9s/step - loss: 21.2826 - accuracy: 0.6234 - val_loss: 263.4985 - val_accuracy: 0.6817 Epoch 17/100 22/22 [==============================] - ETA: 0s - loss: 22.0523 - accuracy: 0.6413 Epoch 17: val_loss improved from 263.49847 to 179.05099, saving model to weights.hdf5 22/22 [==============================] - 206s 9s/step - loss: 22.0523 - accuracy: 0.6413 - val_loss: 179.0510 - val_accuracy: 0.6782 Epoch 18/100 22/22 [==============================] - ETA: 0s - loss: 20.8796 - accuracy: 0.6424 Epoch 18: val_loss improved from 179.05099 to 165.43207, saving model to weights.hdf5 22/22 [==============================] - 206s 9s/step - loss: 20.8796 - accuracy: 0.6424 - val_loss: 165.4321 - val_accuracy: 0.6817 Epoch 19/100 22/22 [==============================] - ETA: 0s - loss: 21.0007 - accuracy: 0.6429 Epoch 19: val_loss did not improve from 165.43207 22/22 [==============================] - 201s 9s/step - loss: 21.0007 - accuracy: 0.6429 - val_loss: 230.0081 - val_accuracy: 0.6817 Epoch 20/100 22/22 [==============================] - ETA: 0s - loss: 17.4146 - accuracy: 0.6478 Epoch 20: val_loss improved from 165.43207 to 164.17595, saving model to weights.hdf5 22/22 [==============================] - 206s 9s/step - loss: 17.4146 - accuracy: 0.6478 - val_loss: 164.1759 - 
val_accuracy: 0.6817 Epoch 21/100 22/22 [==============================] - ETA: 0s - loss: 14.5269 - accuracy: 0.6662 Epoch 21: val_loss improved from 164.17595 to 147.37364, saving model to weights.hdf5 22/22 [==============================] - 205s 9s/step - loss: 14.5269 - accuracy: 0.6662 - val_loss: 147.3736 - val_accuracy: 0.6817 Epoch 22/100 22/22 [==============================] - ETA: 0s - loss: 13.7953 - accuracy: 0.6595 Epoch 22: val_loss improved from 147.37364 to 75.98418, saving model to weights.hdf5 22/22 [==============================] - 209s 10s/step - loss: 13.7953 - accuracy: 0.6595 - val_loss: 75.9842 - val_accuracy: 0.6817 Epoch 23/100 22/22 [==============================] - ETA: 0s - loss: 14.0337 - accuracy: 0.6630 Epoch 23: val_loss improved from 75.98418 to 74.43785, saving model to weights.hdf5 22/22 [==============================] - 204s 9s/step - loss: 14.0337 - accuracy: 0.6630 - val_loss: 74.4379 - val_accuracy: 0.6886 Epoch 24/100 22/22 [==============================] - ETA: 0s - loss: 12.7034 - accuracy: 0.6715 Epoch 24: val_loss improved from 74.43785 to 61.75398, saving model to weights.hdf5 22/22 [==============================] - 204s 9s/step - loss: 12.7034 - accuracy: 0.6715 - val_loss: 61.7540 - val_accuracy: 0.6782 Epoch 25/100 22/22 [==============================] - ETA: 0s - loss: 13.6459 - accuracy: 0.6739 Epoch 25: val_loss did not improve from 61.75398 22/22 [==============================] - 199s 9s/step - loss: 13.6459 - accuracy: 0.6739 - val_loss: 72.6010 - val_accuracy: 0.6955 Epoch 26/100 22/22 [==============================] - ETA: 0s - loss: 14.7361 - accuracy: 0.6717 Epoch 26: val_loss did not improve from 61.75398 22/22 [==============================] - 194s 9s/step - loss: 14.7361 - accuracy: 0.6717 - val_loss: 62.7522 - val_accuracy: 0.6990 Epoch 27/100 22/22 [==============================] - ETA: 0s - loss: 16.4633 - accuracy: 0.6662 Epoch 27: val_loss improved from 61.75398 to 59.10701, saving model 
to weights.hdf5 22/22 [==============================] - 201s 9s/step - loss: 16.4633 - accuracy: 0.6662 - val_loss: 59.1070 - val_accuracy: 0.6713 Epoch 28/100 22/22 [==============================] - ETA: 0s - loss: 11.6515 - accuracy: 0.6732 Epoch 28: val_loss did not improve from 59.10701 22/22 [==============================] - 199s 9s/step - loss: 11.6515 - accuracy: 0.6732 - val_loss: 60.9970 - val_accuracy: 0.6920 Epoch 29/100 22/22 [==============================] - ETA: 0s - loss: 12.7383 - accuracy: 0.6857 Epoch 29: val_loss improved from 59.10701 to 45.26060, saving model to weights.hdf5 22/22 [==============================] - 202s 9s/step - loss: 12.7383 - accuracy: 0.6857 - val_loss: 45.2606 - val_accuracy: 0.6851 Epoch 30/100 22/22 [==============================] - ETA: 0s - loss: 13.5023 - accuracy: 0.6672 Epoch 30: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 13.5023 - accuracy: 0.6672 - val_loss: 53.2231 - val_accuracy: 0.6678 Epoch 31/100 22/22 [==============================] - ETA: 0s - loss: 11.9715 - accuracy: 0.6755 Epoch 31: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 11.9715 - accuracy: 0.6755 - val_loss: 47.4496 - val_accuracy: 0.6990 Epoch 32/100 22/22 [==============================] - ETA: 0s - loss: 12.1619 - accuracy: 0.6768 Epoch 32: val_loss did not improve from 45.26060 22/22 [==============================] - 197s 9s/step - loss: 12.1619 - accuracy: 0.6768 - val_loss: 60.0555 - val_accuracy: 0.6609 Epoch 33/100 22/22 [==============================] - ETA: 0s - loss: 16.7526 - accuracy: 0.6615 Epoch 33: val_loss did not improve from 45.26060 22/22 [==============================] - 200s 9s/step - loss: 16.7526 - accuracy: 0.6615 - val_loss: 112.1316 - val_accuracy: 0.6851 Epoch 34/100 22/22 [==============================] - ETA: 0s - loss: 14.3154 - accuracy: 0.6868 Epoch 34: val_loss did not improve from 45.26060 
22/22 [==============================] - 197s 9s/step - loss: 14.3154 - accuracy: 0.6868 - val_loss: 64.0136 - val_accuracy: 0.6471 Epoch 35/100 22/22 [==============================] - ETA: 0s - loss: 13.3784 - accuracy: 0.6757 Epoch 35: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 13.3784 - accuracy: 0.6757 - val_loss: 118.3325 - val_accuracy: 0.6713 Epoch 36/100 22/22 [==============================] - ETA: 0s - loss: 11.8468 - accuracy: 0.6930 Epoch 36: val_loss did not improve from 45.26060 22/22 [==============================] - 198s 9s/step - loss: 11.8468 - accuracy: 0.6930 - val_loss: 58.7041 - val_accuracy: 0.6678 Epoch 37/100 22/22 [==============================] - ETA: 0s - loss: 14.4722 - accuracy: 0.6866 Epoch 37: val_loss did not improve from 45.26060 22/22 [==============================] - 198s 9s/step - loss: 14.4722 - accuracy: 0.6866 - val_loss: 161.6316 - val_accuracy: 0.6713 Epoch 38/100 22/22 [==============================] - ETA: 0s - loss: 14.6333 - accuracy: 0.6648 Epoch 38: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 14.6333 - accuracy: 0.6648 - val_loss: 101.1918 - val_accuracy: 0.6782 Epoch 39/100 22/22 [==============================] - ETA: 0s - loss: 12.4200 - accuracy: 0.6963 Epoch 39: val_loss did not improve from 45.26060 22/22 [==============================] - 196s 9s/step - loss: 12.4200 - accuracy: 0.6963 - val_loss: 71.9468 - val_accuracy: 0.6713 Epoch 40/100 22/22 [==============================] - ETA: 0s - loss: 13.0707 - accuracy: 0.6928 Epoch 40: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 13.0707 - accuracy: 0.6928 - val_loss: 48.1910 - val_accuracy: 0.6782 Epoch 41/100 22/22 [==============================] - ETA: 0s - loss: 14.4161 - accuracy: 0.6784 Epoch 41: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 
9s/step - loss: 14.4161 - accuracy: 0.6784 - val_loss: 59.1426 - val_accuracy: 0.6782 Epoch 42/100 22/22 [==============================] - ETA: 0s - loss: 11.5196 - accuracy: 0.6994 Epoch 42: val_loss did not improve from 45.26060 22/22 [==============================] - 200s 9s/step - loss: 11.5196 - accuracy: 0.6994 - val_loss: 66.7999 - val_accuracy: 0.6886 Epoch 43/100 22/22 [==============================] - ETA: 0s - loss: 10.8610 - accuracy: 0.7050 Epoch 43: val_loss did not improve from 45.26060 22/22 [==============================] - 198s 9s/step - loss: 10.8610 - accuracy: 0.7050 - val_loss: 81.7714 - val_accuracy: 0.6817 Epoch 44/100 22/22 [==============================] - ETA: 0s - loss: 10.8483 - accuracy: 0.7018 Epoch 44: val_loss did not improve from 45.26060 22/22 [==============================] - 198s 9s/step - loss: 10.8483 - accuracy: 0.7018 - val_loss: 68.1884 - val_accuracy: 0.6609 Epoch 45/100 22/22 [==============================] - ETA: 0s - loss: 12.1906 - accuracy: 0.6994 Epoch 45: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 12.1906 - accuracy: 0.6994 - val_loss: 66.7759 - val_accuracy: 0.6609 Epoch 46/100 22/22 [==============================] - ETA: 0s - loss: 13.2113 - accuracy: 0.7025 Epoch 46: val_loss did not improve from 45.26060 22/22 [==============================] - 198s 9s/step - loss: 13.2113 - accuracy: 0.7025 - val_loss: 58.8961 - val_accuracy: 0.6920 Epoch 47/100 22/22 [==============================] - ETA: 0s - loss: 9.8927 - accuracy: 0.7047 Epoch 47: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 9.8927 - accuracy: 0.7047 - val_loss: 48.7727 - val_accuracy: 0.6851 Epoch 48/100 22/22 [==============================] - ETA: 0s - loss: 8.3404 - accuracy: 0.7034 Epoch 48: val_loss did not improve from 45.26060 22/22 [==============================] - 197s 9s/step - loss: 8.3404 - accuracy: 0.7034 - val_loss: 
64.4797 - val_accuracy: 0.6817 Epoch 49/100 22/22 [==============================] - ETA: 0s - loss: 8.4113 - accuracy: 0.7191 Epoch 49: val_loss did not improve from 45.26060 22/22 [==============================] - 198s 9s/step - loss: 8.4113 - accuracy: 0.7191 - val_loss: 63.8215 - val_accuracy: 0.6574 Epoch 50/100 22/22 [==============================] - ETA: 0s - loss: 9.3857 - accuracy: 0.7145 Epoch 50: val_loss did not improve from 45.26060 22/22 [==============================] - 197s 9s/step - loss: 9.3857 - accuracy: 0.7145 - val_loss: 48.6238 - val_accuracy: 0.6332 Epoch 51/100 22/22 [==============================] - ETA: 0s - loss: 7.6506 - accuracy: 0.7222 Epoch 51: val_loss did not improve from 45.26060 22/22 [==============================] - 199s 9s/step - loss: 7.6506 - accuracy: 0.7222 - val_loss: 45.6007 - val_accuracy: 0.7093 Epoch 52/100 22/22 [==============================] - ETA: 0s - loss: 7.1556 - accuracy: 0.7242 Epoch 52: val_loss improved from 45.26060 to 43.23400, saving model to weights.hdf5 22/22 [==============================] - 201s 9s/step - loss: 7.1556 - accuracy: 0.7242 - val_loss: 43.2340 - val_accuracy: 0.7232 Epoch 53/100 22/22 [==============================] - ETA: 0s - loss: 8.1249 - accuracy: 0.7233 Epoch 53: val_loss improved from 43.23400 to 42.19568, saving model to weights.hdf5 22/22 [==============================] - 202s 9s/step - loss: 8.1249 - accuracy: 0.7233 - val_loss: 42.1957 - val_accuracy: 0.7232 Epoch 54/100 22/22 [==============================] - ETA: 0s - loss: 7.0639 - accuracy: 0.7284 Epoch 54: val_loss did not improve from 42.19568 22/22 [==============================] - 198s 9s/step - loss: 7.0639 - accuracy: 0.7284 - val_loss: 45.9732 - val_accuracy: 0.7232 Epoch 55/100 22/22 [==============================] - ETA: 0s - loss: 7.1404 - accuracy: 0.7316 Epoch 55: val_loss did not improve from 42.19568 22/22 [==============================] - 198s 9s/step - loss: 7.1404 - accuracy: 0.7316 - 
val_loss: 53.8167 - val_accuracy: 0.7163 Epoch 56/100 22/22 [==============================] - ETA: 0s - loss: 9.5730 - accuracy: 0.7249 Epoch 56: val_loss did not improve from 42.19568 22/22 [==============================] - 198s 9s/step - loss: 9.5730 - accuracy: 0.7249 - val_loss: 44.8587 - val_accuracy: 0.7266 Epoch 57/100 22/22 [==============================] - ETA: 0s - loss: 7.1140 - accuracy: 0.7353 Epoch 57: val_loss did not improve from 42.19568 22/22 [==============================] - 200s 9s/step - loss: 7.1140 - accuracy: 0.7353 - val_loss: 42.5660 - val_accuracy: 0.7093 Epoch 58/100 22/22 [==============================] - ETA: 0s - loss: 5.9810 - accuracy: 0.7335 Epoch 58: val_loss did not improve from 42.19568 22/22 [==============================] - 196s 9s/step - loss: 5.9810 - accuracy: 0.7335 - val_loss: 42.3425 - val_accuracy: 0.7336 Epoch 59/100 22/22 [==============================] - ETA: 0s - loss: 6.3529 - accuracy: 0.7417 Epoch 59: val_loss improved from 42.19568 to 41.67676, saving model to weights.hdf5 22/22 [==============================] - 202s 9s/step - loss: 6.3529 - accuracy: 0.7417 - val_loss: 41.6768 - val_accuracy: 0.7232 Epoch 60/100 22/22 [==============================] - ETA: 0s - loss: 6.2040 - accuracy: 0.7344 Epoch 60: val_loss did not improve from 41.67676 22/22 [==============================] - 199s 9s/step - loss: 6.2040 - accuracy: 0.7344 - val_loss: 42.7727 - val_accuracy: 0.7093 Epoch 61/100 22/22 [==============================] - ETA: 0s - loss: 6.0538 - accuracy: 0.7391 Epoch 61: val_loss did not improve from 41.67676 22/22 [==============================] - 198s 9s/step - loss: 6.0538 - accuracy: 0.7391 - val_loss: 42.6630 - val_accuracy: 0.7163 Epoch 62/100 22/22 [==============================] - ETA: 0s - loss: 7.7563 - accuracy: 0.7408 Epoch 62: val_loss did not improve from 41.67676 22/22 [==============================] - 185s 8s/step - loss: 7.7563 - accuracy: 0.7408 - val_loss: 42.7542 - 
val_accuracy: 0.7128 Epoch 63/100 22/22 [==============================] - ETA: 0s - loss: 6.9452 - accuracy: 0.7413 Epoch 63: val_loss improved from 41.67676 to 40.36114, saving model to weights.hdf5 22/22 [==============================] - 117s 5s/step - loss: 6.9452 - accuracy: 0.7413 - val_loss: 40.3611 - val_accuracy: 0.7163 Epoch 64/100 22/22 [==============================] - ETA: 0s - loss: 6.5667 - accuracy: 0.7408 Epoch 64: val_loss improved from 40.36114 to 39.62456, saving model to weights.hdf5 22/22 [==============================] - 107s 5s/step - loss: 6.5667 - accuracy: 0.7408 - val_loss: 39.6246 - val_accuracy: 0.7266 Epoch 65/100 22/22 [==============================] - ETA: 0s - loss: 6.1841 - accuracy: 0.7433 Epoch 65: val_loss did not improve from 39.62456 22/22 [==============================] - 104s 5s/step - loss: 6.1841 - accuracy: 0.7433 - val_loss: 42.7494 - val_accuracy: 0.7163 Epoch 66/100 22/22 [==============================] - ETA: 0s - loss: 7.6944 - accuracy: 0.7453 Epoch 66: val_loss improved from 39.62456 to 39.16723, saving model to weights.hdf5 22/22 [==============================] - 106s 5s/step - loss: 7.6944 - accuracy: 0.7453 - val_loss: 39.1672 - val_accuracy: 0.6990 Epoch 67/100 22/22 [==============================] - ETA: 0s - loss: 6.4657 - accuracy: 0.7411 Epoch 67: val_loss did not improve from 39.16723 22/22 [==============================] - 102s 5s/step - loss: 6.4657 - accuracy: 0.7411 - val_loss: 39.9409 - val_accuracy: 0.7024 Epoch 68/100 22/22 [==============================] - ETA: 0s - loss: 6.2810 - accuracy: 0.7499 Epoch 68: val_loss did not improve from 39.16723 22/22 [==============================] - 102s 5s/step - loss: 6.2810 - accuracy: 0.7499 - val_loss: 41.1544 - val_accuracy: 0.7197 Epoch 69/100 22/22 [==============================] - ETA: 0s - loss: 6.2011 - accuracy: 0.7435 Epoch 69: val_loss did not improve from 39.16723 22/22 [==============================] - 102s 5s/step - loss: 6.2011 - 
accuracy: 0.7435 - val_loss: 41.4151 - val_accuracy: 0.7128 Epoch 70/100 22/22 [==============================] - ETA: 0s - loss: 6.4810 - accuracy: 0.7550 Epoch 70: val_loss did not improve from 39.16723 22/22 [==============================] - 103s 5s/step - loss: 6.4810 - accuracy: 0.7550 - val_loss: 43.1370 - val_accuracy: 0.7093 Epoch 71/100 22/22 [==============================] - ETA: 0s - loss: 5.9547 - accuracy: 0.7537 Epoch 71: val_loss did not improve from 39.16723 22/22 [==============================] - 103s 5s/step - loss: 5.9547 - accuracy: 0.7537 - val_loss: 41.5262 - val_accuracy: 0.7128 Epoch 72/100 22/22 [==============================] - ETA: 0s - loss: 5.7732 - accuracy: 0.7515 Epoch 72: val_loss did not improve from 39.16723 22/22 [==============================] - 103s 5s/step - loss: 5.7732 - accuracy: 0.7515 - val_loss: 43.6323 - val_accuracy: 0.7059 Epoch 73/100 22/22 [==============================] - ETA: 0s - loss: 5.8383 - accuracy: 0.7553 Epoch 73: val_loss did not improve from 39.16723 22/22 [==============================] - 103s 5s/step - loss: 5.8383 - accuracy: 0.7553 - val_loss: 40.2037 - val_accuracy: 0.7336 Epoch 74/100 22/22 [==============================] - ETA: 0s - loss: 5.0842 - accuracy: 0.7677 Epoch 74: val_loss did not improve from 39.16723 22/22 [==============================] - 100s 5s/step - loss: 5.0842 - accuracy: 0.7677 - val_loss: 39.6512 - val_accuracy: 0.7197 Epoch 75/100 22/22 [==============================] - ETA: 0s - loss: 5.0966 - accuracy: 0.7553 Epoch 75: val_loss did not improve from 39.16723 22/22 [==============================] - 103s 5s/step - loss: 5.0966 - accuracy: 0.7553 - val_loss: 39.7781 - val_accuracy: 0.7093 Epoch 76/100 22/22 [==============================] - ETA: 0s - loss: 6.0752 - accuracy: 0.7599 Epoch 76: val_loss did not improve from 39.16723 22/22 [==============================] - 102s 5s/step - loss: 6.0752 - accuracy: 0.7599 - val_loss: 40.4597 - val_accuracy: 0.7197 Epoch 
77/100 22/22 [==============================] - ETA: 0s - loss: 6.1332 - accuracy: 0.7593 Epoch 77: val_loss did not improve from 39.16723 22/22 [==============================] - 102s 5s/step - loss: 6.1332 - accuracy: 0.7593 - val_loss: 39.7377 - val_accuracy: 0.7059 Epoch 78/100 22/22 [==============================] - ETA: 0s - loss: 5.9625 - accuracy: 0.7584 Epoch 78: val_loss improved from 39.16723 to 37.37673, saving model to weights.hdf5 22/22 [==============================] - 105s 5s/step - loss: 5.9625 - accuracy: 0.7584 - val_loss: 37.3767 - val_accuracy: 0.7197 Epoch 79/100 22/22 [==============================] - ETA: 0s - loss: 5.6913 - accuracy: 0.7626 Epoch 79: val_loss did not improve from 37.37673 22/22 [==============================] - 102s 5s/step - loss: 5.6913 - accuracy: 0.7626 - val_loss: 38.4324 - val_accuracy: 0.7059 Epoch 80/100 22/22 [==============================] - ETA: 0s - loss: 6.2300 - accuracy: 0.7659 Epoch 80: val_loss did not improve from 37.37673 22/22 [==============================] - 101s 5s/step - loss: 6.2300 - accuracy: 0.7659 - val_loss: 41.1832 - val_accuracy: 0.7232 Epoch 81/100 22/22 [==============================] - ETA: 0s - loss: 7.3742 - accuracy: 0.7610 Epoch 81: val_loss did not improve from 37.37673 22/22 [==============================] - 100s 5s/step - loss: 7.3742 - accuracy: 0.7610 - val_loss: 38.5324 - val_accuracy: 0.7266 Epoch 82/100 22/22 [==============================] - ETA: 0s - loss: 5.4107 - accuracy: 0.7604 Epoch 82: val_loss improved from 37.37673 to 37.27297, saving model to weights.hdf5 22/22 [==============================] - 104s 5s/step - loss: 5.4107 - accuracy: 0.7604 - val_loss: 37.2730 - val_accuracy: 0.7163 Epoch 83/100 22/22 [==============================] - ETA: 0s - loss: 4.7659 - accuracy: 0.7650 Epoch 83: val_loss improved from 37.27297 to 37.15763, saving model to weights.hdf5 22/22 [==============================] - 103s 5s/step - loss: 4.7659 - accuracy: 0.7650 - val_loss: 
37.1576 - val_accuracy: 0.7370 Epoch 84/100 22/22 [==============================] - ETA: 0s - loss: 5.3764 - accuracy: 0.7756 Epoch 84: val_loss did not improve from 37.15763 22/22 [==============================] - 102s 5s/step - loss: 5.3764 - accuracy: 0.7756 - val_loss: 38.5773 - val_accuracy: 0.7128 Epoch 85/100 22/22 [==============================] - ETA: 0s - loss: 5.6808 - accuracy: 0.7721 Epoch 85: val_loss improved from 37.15763 to 36.86702, saving model to weights.hdf5 22/22 [==============================] - 103s 5s/step - loss: 5.6808 - accuracy: 0.7721 - val_loss: 36.8670 - val_accuracy: 0.7197 Epoch 86/100 22/22 [==============================] - ETA: 0s - loss: 5.0924 - accuracy: 0.7706 Epoch 86: val_loss did not improve from 36.86702 22/22 [==============================] - 102s 5s/step - loss: 5.0924 - accuracy: 0.7706 - val_loss: 36.9134 - val_accuracy: 0.7474 Epoch 87/100 22/22 [==============================] - ETA: 0s - loss: 6.0845 - accuracy: 0.7737 Epoch 87: val_loss did not improve from 36.86702 22/22 [==============================] - 102s 5s/step - loss: 6.0845 - accuracy: 0.7737 - val_loss: 39.6102 - val_accuracy: 0.7439 Epoch 88/100 22/22 [==============================] - ETA: 0s - loss: 5.4183 - accuracy: 0.7694 Epoch 88: val_loss improved from 36.86702 to 36.57550, saving model to weights.hdf5 22/22 [==============================] - 104s 5s/step - loss: 5.4183 - accuracy: 0.7694 - val_loss: 36.5755 - val_accuracy: 0.7163 Epoch 89/100 22/22 [==============================] - ETA: 0s - loss: 4.8560 - accuracy: 0.7732 Epoch 89: val_loss did not improve from 36.57550 22/22 [==============================] - 100s 5s/step - loss: 4.8560 - accuracy: 0.7732 - val_loss: 38.6981 - val_accuracy: 0.7266 Epoch 90/100 22/22 [==============================] - ETA: 0s - loss: 5.3308 - accuracy: 0.7750 Epoch 90: val_loss improved from 36.57550 to 35.50803, saving model to weights.hdf5 22/22 [==============================] - 105s 5s/step - loss: 
5.3308 - accuracy: 0.7750 - val_loss: 35.5080 - val_accuracy: 0.7370 Epoch 91/100 22/22 [==============================] - ETA: 0s - loss: 4.7565 - accuracy: 0.7763 Epoch 91: val_loss did not improve from 35.50803 22/22 [==============================] - 103s 5s/step - loss: 4.7565 - accuracy: 0.7763 - val_loss: 36.5967 - val_accuracy: 0.7266 Epoch 92/100 22/22 [==============================] - ETA: 0s - loss: 4.5930 - accuracy: 0.7788 Epoch 92: val_loss did not improve from 35.50803 22/22 [==============================] - 103s 5s/step - loss: 4.5930 - accuracy: 0.7788 - val_loss: 36.9921 - val_accuracy: 0.7197 Epoch 93/100 22/22 [==============================] - ETA: 0s - loss: 5.3950 - accuracy: 0.7872 Epoch 93: val_loss did not improve from 35.50803 22/22 [==============================] - 102s 5s/step - loss: 5.3950 - accuracy: 0.7872 - val_loss: 37.1462 - val_accuracy: 0.7232 Epoch 94/100 22/22 [==============================] - ETA: 0s - loss: 5.6090 - accuracy: 0.7865 Epoch 94: val_loss did not improve from 35.50803 22/22 [==============================] - 101s 5s/step - loss: 5.6090 - accuracy: 0.7865 - val_loss: 41.1304 - val_accuracy: 0.7405 Epoch 95/100 22/22 [==============================] - ETA: 0s - loss: 6.2103 - accuracy: 0.7854 Epoch 95: val_loss did not improve from 35.50803 22/22 [==============================] - 101s 5s/step - loss: 6.2103 - accuracy: 0.7854 - val_loss: 43.9942 - val_accuracy: 0.6886 Epoch 96/100 22/22 [==============================] - ETA: 0s - loss: 6.7381 - accuracy: 0.7889 Epoch 96: val_loss did not improve from 35.50803 22/22 [==============================] - 101s 5s/step - loss: 6.7381 - accuracy: 0.7889 - val_loss: 41.1083 - val_accuracy: 0.7232 Epoch 97/100 22/22 [==============================] - ETA: 0s - loss: 9.3805 - accuracy: 0.7830 Epoch 97: val_loss did not improve from 35.50803 22/22 [==============================] - 102s 5s/step - loss: 9.3805 - accuracy: 0.7830 - val_loss: 44.5100 - val_accuracy: 
0.7301 Epoch 98/100 22/22 [==============================] - ETA: 0s - loss: 10.8477 - accuracy: 0.7677 Epoch 98: val_loss did not improve from 35.50803 22/22 [==============================] - 102s 5s/step - loss: 10.8477 - accuracy: 0.7677 - val_loss: 36.2096 - val_accuracy: 0.7612 Epoch 99/100 22/22 [==============================] - ETA: 0s - loss: 5.8466 - accuracy: 0.7805 Epoch 99: val_loss did not improve from 35.50803 22/22 [==============================] - 101s 5s/step - loss: 5.8466 - accuracy: 0.7805 - val_loss: 40.2092 - val_accuracy: 0.7612 Epoch 100/100 22/22 [==============================] - ETA: 0s - loss: 6.5470 - accuracy: 0.7792 Epoch 100: val_loss did not improve from 35.50803 22/22 [==============================] - 100s 5s/step - loss: 6.5470 - accuracy: 0.7792 - val_loss: 36.7217 - val_accuracy: 0.7093
MINI CHALLENGE:
# Evaluate the trained model on the held-out test set.
# NOTE(review): 'accuracy' is a questionable metric for this regression task
# (continuous keypoint coordinates); the loss (index 0 of `result`) is the
# meaningful number — confirm against model.compile() metrics.
result = model.evaluate(X_test, y_test)
print(f"Accuracy : {result[1]}")
21/21 [==============================] - 3s 152ms/step - loss: 46.8234 - accuracy: 0.7150 Accuracy : 0.7149532437324524
# Inspect which metrics Keras recorded during training
# (expected: 'loss'/'accuracy' plus their 'val_' counterparts).
history.history.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Visualize the training vs. validation loss curves across epochs.
for series in ('loss', 'val_loss'):
    plt.plot(history.history[series])
plt.title('Model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train_loss','val_loss'], loc='upper right')
plt.show()
# Make prediction using the testing dataset.
# Presumably returns an (n_samples, 30) array of keypoint coordinates,
# one value per target column — TODO confirm against y_test.shape.
df_predict = model.predict(X_test)
21/21 [==============================] - 4s 147ms/step
# Report the RMSE between true and predicted keypoint coordinates.
# RMSE = sqrt(MSE) is preferred here because it is in the same units
# (pixels) as the coordinates themselves.
from math import sqrt

from sklearn.metrics import mean_squared_error

rms = sqrt(mean_squared_error(y_test, df_predict))
print(f"RMSE value : {rms}")
RMSE value : 6.842761103282911
# Convert the predicted values into a dataframe.
# NOTE(review): `columns` is defined earlier in the notebook — presumably the
# 30 keypoint column names (i.e. the CSV header minus 'Image'); confirm its
# length matches df_predict's second dimension.
df_predict= pd.DataFrame(df_predict, columns = columns)
df_predict.head()
| left_eye_center_x | left_eye_center_y | right_eye_center_x | right_eye_center_y | left_eye_inner_corner_x | left_eye_inner_corner_y | left_eye_outer_corner_x | left_eye_outer_corner_y | right_eye_inner_corner_x | right_eye_inner_corner_y | ... | nose_tip_x | nose_tip_y | mouth_left_corner_x | mouth_left_corner_y | mouth_right_corner_x | mouth_right_corner_y | mouth_center_top_lip_x | mouth_center_top_lip_y | mouth_center_bottom_lip_x | mouth_center_bottom_lip_y | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 66.546814 | 34.878189 | 28.750193 | 34.561085 | 58.210403 | 36.018631 | 74.996208 | 35.719490 | 36.773174 | 35.861412 | ... | 46.685181 | 58.746872 | 63.644035 | 73.983780 | 30.295158 | 73.649971 | 46.943630 | 72.896469 | 46.819183 | 80.163353 |
| 1 | 29.482170 | 34.187202 | 59.123112 | 35.618015 | 36.232559 | 35.686367 | 21.861912 | 34.358337 | 53.752048 | 36.605457 | ... | 54.208168 | 59.233318 | 30.807693 | 77.615036 | 54.738274 | 78.263359 | 46.643562 | 77.315651 | 45.935997 | 78.526711 |
| 2 | 66.376038 | 36.496006 | 31.114544 | 34.996719 | 57.728226 | 37.484722 | 74.298912 | 38.049782 | 37.711224 | 36.729645 | ... | 39.557541 | 56.023273 | 60.515495 | 77.594101 | 30.663126 | 76.136665 | 43.179878 | 75.600899 | 43.028343 | 78.189995 |
| 3 | 31.488163 | 34.503323 | 62.687824 | 35.487240 | 37.157219 | 36.242107 | 24.718798 | 34.844948 | 56.135235 | 36.778637 | ... | 43.784073 | 59.806534 | 31.593039 | 73.122604 | 58.898430 | 74.052704 | 44.590820 | 75.616913 | 44.316910 | 75.843666 |
| 4 | 65.350502 | 37.453686 | 29.051720 | 37.763889 | 57.765930 | 38.958469 | 72.824669 | 38.269569 | 37.151745 | 39.055943 | ... | 53.806442 | 61.621914 | 63.691833 | 76.740837 | 30.625082 | 77.666245 | 49.649487 | 78.415123 | 49.343628 | 80.555618 |
5 rows × 30 columns
# Plot the first 8 test images in a 4x2 grid with their predicted
# keypoints overlaid as red 'x' markers.
fig = plt.figure(figsize=(20, 20))
for i in range(8):
    ax = fig.add_subplot(4, 2, i + 1)
    # squeeze() drops the trailing channel axis: (96, 96, 1) -> (96, 96)
    ax.imshow(X_test[i].squeeze(), cmap='gray')
    # The 30 columns alternate x/y pairs; `iloc[i, j-1]` replaces the
    # original chained `df_predict.loc[i][j-1]`, whose integer-position
    # indexing on a Series is deprecated in modern pandas.
    # Plotting on `ax` explicitly (instead of plt.plot) makes the target
    # axes unambiguous.
    for j in range(1, 31, 2):
        ax.plot(df_predict.iloc[i, j - 1], df_predict.iloc[i, j], 'rx')